In [1]:
# %matplotlib notebook
import pandas as pd
import numpy as np
import os
import sys
import time
import pickle
import seaborn
from scipy import stats
from sklearn import svm, preprocessing
from sklearn.utils import shuffle
from sklearn.ensemble import RandomForestClassifier, VotingClassifier
from sklearn.naive_bayes import GaussianNB, BernoulliNB, ComplementNB, MultinomialNB
from sklearn.neural_network import MLPClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, confusion_matrix, roc_curve, f1_score
import matplotlib.pyplot as plt
In [2]:
df = pd.read_csv("dataforproject.csv")
print(df.columns)
df
Index(['Username', 'Major', 'Specialization', 'Department',
       'User Profile Link', 'Term & Year', 'UG College', 'University Name',
       'Program', 'Research Exp', 'Industry Exp', 'Intern Exp', 'Journal Pubs',
       'ConfPubs', 'TOEFL Score', 'TOEFL Essay', 'greV', 'greQ', 'greA',
       'gmatA', 'gmatQ', 'gmatV', 'Topper CGPA', 'CGPA', 'CGPA Scale',
       'Admission'],
      dtype='object')
Out[2]:
Username Major Specialization Department User Profile Link Term & Year UG College University Name Program Research Exp ... greV greQ greA gmatA gmatQ gmatV Topper CGPA CGPA CGPA Scale Admission
0 ninny.rgs Electrical Engineering NaN Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering New Jersey Institute of Technology MS 0 ... 430.0 740.0 4.5 NaN NaN NaN 72.27 72.27 100.0 1
1 ninny.rgs Electrical Engineering NaN Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering University of Southern California MS 0 ... 430.0 740.0 4.5 NaN NaN NaN 72.27 72.27 100.0 1
2 ninny.rgs Electrical Engineering NaN Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 430.0 740.0 4.5 NaN NaN NaN 72.27 72.27 100.0 1
3 aashishg11 Electrical Engineering Automation Instrumentation http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 144.0 157.0 3.0 NaN NaN NaN 73.00 56.11 100.0 0
4 prnk_us Computer Science Security IT http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A D Patel Institute Of Technology Arizona State University MS 0 ... 149.0 157.0 3.0 NaN NaN NaN 8.50 7.71 10.0 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
53638 vins747 NaN NaN 0 http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 NaN Worcester Polytechnic Institute MS 0 ... 149.0 158.0 3.5 NaN NaN NaN 0.00 6.60 100.0 1
53639 ameed.haikal Biomedical Engineering NaN 0 http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2014 NaN Worcester Polytechnic Institute PhD 10 ... 155.0 160.0 3.5 NaN NaN NaN 0.00 6.34 10.0 0
53640 downing Electrical Engineering Power 0 http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 NaN Worcester Polytechnic Institute MS 0 ... 490.0 790.0 3.0 NaN NaN NaN 0.00 0.00 0.0 0
53641 fancy3361 software engineering NaN 0 http://www.edulix.com/unisearch/user.php?uid=2... Spring - 2014 NaN Worcester Polytechnic Institute MS 0 ... NaN NaN NaN NaN NaN NaN 0.00 0.00 0.0 0
53642 saohua Computer Engineering NaN 0 http://www.edulix.com/unisearch/user.php?uid=2... Spring - 2014 NaN Worcester Polytechnic Institute MS/PhD 0 ... 146.0 160.0 3.0 NaN NaN NaN 0.00 0.00 0.0 0

53643 rows × 26 columns

In [3]:
df.dropna()
Out[3]:
Username Major Specialization Department User Profile Link Term & Year UG College University Name Program Research Exp ... greV greQ greA gmatA gmatQ gmatV Topper CGPA CGPA CGPA Scale Admission
15914 aditya57 Computer Science Systems Information Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2014 JNTU North Carolina State University MS 0 ... 163.0 166.0 3.5 5.0 50.0 39.0 85.8 82.3 100.0 1
16149 aditya57 Computer Science Systems Information Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2014 JNTU SUNY Stony Brook MS 0 ... 163.0 166.0 3.5 5.0 50.0 39.0 85.8 82.3 100.0 1
16507 aditya57 Computer Science Systems Information Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2014 JNTU University of Massachusetts Amherst MS 0 ... 163.0 166.0 3.5 5.0 50.0 39.0 85.8 82.3 100.0 0
16806 aditya57 Computer Science Systems Information Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2014 JNTU University of Utah MS 0 ... 163.0 166.0 3.5 5.0 50.0 39.0 85.8 82.3 100.0 1
16829 aditya57 Computer Science Systems Information Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2014 JNTU University of Wisconsin Madison MS 0 ... 163.0 166.0 3.5 5.0 50.0 39.0 85.8 82.3 100.0 0
16838 aditya57 Computer Science Systems Information Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2014 JNTU Virginia Polytechnic Institute and State Unive... MS 0 ... 163.0 166.0 3.5 5.0 50.0 39.0 85.8 82.3 100.0 0
25106 Niteshd7 Information Technology MIS 0 http://www.edulix.com/unisearch/user.php?uid=2... Fall - 2015 Nagpur University University of Arizona MS 0 ... 153.0 156.0 4.0 5.0 44.0 33.0 76.0 62.1 100.0 1

7 rows × 26 columns

In [4]:
# df = df[~(df['UG College'] == 'NaN')]
df = df[pd.notnull(df['UG College'])]
df
Out[4]:
Username Major Specialization Department User Profile Link Term & Year UG College University Name Program Research Exp ... greV greQ greA gmatA gmatQ gmatV Topper CGPA CGPA CGPA Scale Admission
0 ninny.rgs Electrical Engineering NaN Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering New Jersey Institute of Technology MS 0 ... 430.0 740.0 4.5 NaN NaN NaN 72.27 72.27 100.0 1
1 ninny.rgs Electrical Engineering NaN Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering University of Southern California MS 0 ... 430.0 740.0 4.5 NaN NaN NaN 72.27 72.27 100.0 1
2 ninny.rgs Electrical Engineering NaN Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 430.0 740.0 4.5 NaN NaN NaN 72.27 72.27 100.0 1
3 aashishg11 Electrical Engineering Automation Instrumentation http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 144.0 157.0 3.0 NaN NaN NaN 73.00 56.11 100.0 0
4 prnk_us Computer Science Security IT http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A D Patel Institute Of Technology Arizona State University MS 0 ... 149.0 157.0 3.0 NaN NaN NaN 8.50 7.71 10.0 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University Texas A and M University College Station MS 0 ... 680.0 800.0 3.0 NaN NaN NaN 3.71 3.68 4.0 0
51320 jhstjh Computer Science Game Development / Graphics Digital Media Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 Zhejiang University University of Pennsylvania MS 0 ... 154.0 169.0 3.0 NaN NaN NaN 0.00 87.00 100.0 1
51321 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University University of Southern California MS 0 ... 680.0 800.0 3.0 NaN NaN NaN 3.71 3.68 4.0 1
51322 jhstjh Computer Science Game Development / Graphics Digital Media Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 Zhejiang University University of Southern California MS 0 ... 154.0 169.0 3.0 NaN NaN NaN 0.00 87.00 100.0 1
51323 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University University of Texas Austin MS 0 ... 680.0 800.0 3.0 NaN NaN NaN 3.71 3.68 4.0 1

51324 rows × 26 columns

In [5]:
df = df.fillna(0)
df
Out[5]:
Username Major Specialization Department User Profile Link Term & Year UG College University Name Program Research Exp ... greV greQ greA gmatA gmatQ gmatV Topper CGPA CGPA CGPA Scale Admission
0 ninny.rgs Electrical Engineering 0 Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering New Jersey Institute of Technology MS 0 ... 430.0 740.0 4.5 0.0 0.0 0.0 72.27 72.27 100.0 1
1 ninny.rgs Electrical Engineering 0 Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering University of Southern California MS 0 ... 430.0 740.0 4.5 0.0 0.0 0.0 72.27 72.27 100.0 1
2 ninny.rgs Electrical Engineering 0 Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 430.0 740.0 4.5 0.0 0.0 0.0 72.27 72.27 100.0 1
3 aashishg11 Electrical Engineering Automation Instrumentation http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 144.0 157.0 3.0 0.0 0.0 0.0 73.00 56.11 100.0 0
4 prnk_us Computer Science Security IT http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A D Patel Institute Of Technology Arizona State University MS 0 ... 149.0 157.0 3.0 0.0 0.0 0.0 8.50 7.71 10.0 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University Texas A and M University College Station MS 0 ... 680.0 800.0 3.0 0.0 0.0 0.0 3.71 3.68 4.0 0
51320 jhstjh Computer Science Game Development / Graphics Digital Media Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 Zhejiang University University of Pennsylvania MS 0 ... 154.0 169.0 3.0 0.0 0.0 0.0 0.00 87.00 100.0 1
51321 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University University of Southern California MS 0 ... 680.0 800.0 3.0 0.0 0.0 0.0 3.71 3.68 4.0 1
51322 jhstjh Computer Science Game Development / Graphics Digital Media Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 Zhejiang University University of Southern California MS 0 ... 154.0 169.0 3.0 0.0 0.0 0.0 0.00 87.00 100.0 1
51323 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University University of Texas Austin MS 0 ... 680.0 800.0 3.0 0.0 0.0 0.0 3.71 3.68 4.0 1

51324 rows × 26 columns

In [6]:
df.dtypes
Out[6]:
Username              object
Major                 object
Specialization        object
Department            object
User Profile Link     object
Term & Year           object
UG College            object
University Name       object
Program               object
Research Exp           int64
Industry Exp           int64
Intern Exp           float64
Journal Pubs         float64
ConfPubs             float64
TOEFL Score          float64
TOEFL Essay          float64
greV                 float64
greQ                 float64
greA                 float64
gmatA                float64
gmatQ                float64
gmatV                float64
Topper CGPA          float64
CGPA                 float64
CGPA Scale           float64
Admission              int64
dtype: object
In [7]:
df.describe()
Out[7]:
Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA gmatA gmatQ gmatV Topper CGPA CGPA CGPA Scale Admission
count 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000
mean 0.346037 4.159886 0.465942 0.040020 0.051087 101.798847 5.466324 321.479951 419.027336 4.872961 0.106353 0.117255 0.074585 36.753957 40.286508 56.643695 0.514671
std 2.617488 12.165493 2.491681 0.294584 0.344925 43.859469 10.796335 216.072739 322.024787 36.547059 6.960437 2.610511 1.915089 37.481102 32.334540 45.379822 0.499790
min 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
25% 0.000000 0.000000 0.000000 0.000000 0.000000 99.000000 0.000000 151.000000 162.000000 3.000000 0.000000 0.000000 0.000000 8.700000 8.252000 10.000000 0.000000
50% 0.000000 0.000000 0.000000 0.000000 0.000000 106.000000 0.000000 159.000000 168.000000 3.500000 0.000000 0.000000 0.000000 9.650000 57.000000 100.000000 1.000000
75% 0.000000 0.000000 0.000000 0.000000 0.000000 111.000000 0.000000 540.000000 770.000000 4.000000 0.000000 0.000000 0.000000 80.000000 72.000000 100.000000 1.000000
max 53.000000 138.000000 96.000000 12.000000 8.000000 1350.000000 60.000000 5560.000000 7990.000000 1470.000000 740.000000 168.000000 152.000000 100.000000 102.000000 100.000000 1.000000
In [8]:
le = preprocessing.LabelEncoder()
df['UG label'] = le.fit_transform(df['UG College'].astype(str))
df
Out[8]:
Username Major Specialization Department User Profile Link Term & Year UG College University Name Program Research Exp ... greQ greA gmatA gmatQ gmatV Topper CGPA CGPA CGPA Scale Admission UG label
0 ninny.rgs Electrical Engineering 0 Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering New Jersey Institute of Technology MS 0 ... 740.0 4.5 0.0 0.0 0.0 72.27 72.27 100.0 1 0
1 ninny.rgs Electrical Engineering 0 Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering University of Southern California MS 0 ... 740.0 4.5 0.0 0.0 0.0 72.27 72.27 100.0 1 0
2 ninny.rgs Electrical Engineering 0 Electrical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2011 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 740.0 4.5 0.0 0.0 0.0 72.27 72.27 100.0 1 0
3 aashishg11 Electrical Engineering Automation Instrumentation http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A C Patil College of Engineering Worcester Polytechnic Institute MS 0 ... 157.0 3.0 0.0 0.0 0.0 73.00 56.11 100.0 0 0
4 prnk_us Computer Science Security IT http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 A D Patel Institute Of Technology Arizona State University MS 0 ... 157.0 3.0 0.0 0.0 0.0 8.50 7.71 10.0 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University Texas A and M University College Station MS 0 ... 800.0 3.0 0.0 0.0 0.0 3.71 3.68 4.0 0 1401
51320 jhstjh Computer Science Game Development / Graphics Digital Media Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 Zhejiang University University of Pennsylvania MS 0 ... 169.0 3.0 0.0 0.0 0.0 0.00 87.00 100.0 1 1401
51321 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University University of Southern California MS 0 ... 800.0 3.0 0.0 0.0 0.0 3.71 3.68 4.0 1 1401
51322 jhstjh Computer Science Game Development / Graphics Digital Media Technology http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2013 Zhejiang University University of Southern California MS 0 ... 169.0 3.0 0.0 0.0 0.0 0.00 87.00 100.0 1 1401
51323 airyuanye Industrial Engineering Simulation&Modeling; SCM Dpt. of Mechanical Engineering http://www.edulix.com/unisearch/user.php?uid=1... Fall - 2012 Zhejiang University University of Texas Austin MS 0 ... 800.0 3.0 0.0 0.0 0.0 3.71 3.68 4.0 1 1401

51324 rows × 27 columns

In [9]:
df = df.drop(columns=['Username', 'Specialization', 'Major', 'Department', 'User Profile Link', 'Term & Year', 'UG College', 'gmatV', 'gmatQ', 'gmatA'])
df
Out[9]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA CGPA Scale Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 72.27 72.27 100.0 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 72.27 72.27 100.0 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 72.27 72.27 100.0 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 73.00 56.11 100.0 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.50 7.71 10.0 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 3.71 3.68 4.0 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 0.00 87.00 100.0 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 3.71 3.68 4.0 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 0.00 87.00 100.0 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 3.71 3.68 4.0 1 1401

51324 rows × 17 columns

In [10]:
df.describe()
Out[10]:
Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA CGPA Scale Admission UG label
count 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000 51324.000000
mean 0.346037 4.159886 0.465942 0.040020 0.051087 101.798847 5.466324 321.479951 419.027336 4.872961 36.753957 40.286508 56.643695 0.514671 713.460584
std 2.617488 12.165493 2.491681 0.294584 0.344925 43.859469 10.796335 216.072739 322.024787 36.547059 37.481102 32.334540 45.379822 0.499790 430.167158
min 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000 0.000000
25% 0.000000 0.000000 0.000000 0.000000 0.000000 99.000000 0.000000 151.000000 162.000000 3.000000 8.700000 8.252000 10.000000 0.000000 354.000000
50% 0.000000 0.000000 0.000000 0.000000 0.000000 106.000000 0.000000 159.000000 168.000000 3.500000 9.650000 57.000000 100.000000 1.000000 702.000000
75% 0.000000 0.000000 0.000000 0.000000 0.000000 111.000000 0.000000 540.000000 770.000000 4.000000 80.000000 72.000000 100.000000 1.000000 1059.000000
max 53.000000 138.000000 96.000000 12.000000 8.000000 1350.000000 60.000000 5560.000000 7990.000000 1470.000000 100.000000 102.000000 100.000000 1.000000 1430.000000
In [11]:
l = len(df)
df = df[~(df['CGPA Scale'] == 0)]
l1 = len(df)
print(l-l1, "outliers removed.")

l = len(df)
df = df[~(df['CGPA'] == 0)]
l1 = len(df)
print(l-l1, "outliers removed.")
1105 outliers removed.
47 outliers removed.
In [12]:
df['CGPA'] = 10*df['CGPA']/df['CGPA Scale']
df
Out[12]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA CGPA Scale Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 72.27 7.227 100.0 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 72.27 7.227 100.0 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 72.27 7.227 100.0 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 73.00 5.611 100.0 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.50 7.710 10.0 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 3.71 9.200 4.0 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 0.00 8.700 100.0 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 3.71 9.200 4.0 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 0.00 8.700 100.0 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 3.71 9.200 4.0 1 1401

50172 rows × 17 columns

In [13]:
df['Topper CGPA'] = 10*df['Topper CGPA']/df['CGPA Scale']
df
Out[13]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA CGPA Scale Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 100.0 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 100.0 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 100.0 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 100.0 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 10.0 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 4.0 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 0.000 8.700 100.0 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 4.0 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 0.000 8.700 100.0 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 4.0 1 1401

50172 rows × 17 columns

In [14]:
df['Topper CGPA'] = df['Topper CGPA'].apply(lambda x: 9 if float(x)==0 else float(x))
df
Out[14]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA CGPA Scale Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 100.0 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 100.0 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 100.0 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 100.0 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 10.0 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 4.0 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 100.0 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 4.0 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 100.0 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 4.0 1 1401

50172 rows × 17 columns

In [15]:
df = df.drop(columns=['CGPA Scale'])
df
Out[15]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401

50172 rows × 16 columns

In [16]:
# Outlier Removal
l = len(df)
df = df[~(df['CGPA'] <= 3)]
l1 = len(df)
print(l-l1, 'outliers removed.')
# df.reset_index(inplace=True)
493 outliers removed.
In [17]:
# Outlier Removal
l = len(df)
df = df[~(df['CGPA'] > 10)]
l1 = len(df)
print(l-l1, 'outliers removed.')
# df.reset_index(inplace=True)
54 outliers removed.
In [18]:
# Outlier Removal
l = len(df)
df = df[~(df['Topper CGPA'] > 10)]
l1 = len(df)
print(l-l1, 'outliers removed.')
# df.reset_index(inplace=True)
3 outliers removed.
In [19]:
# Outlier Removal
l = len(df)
df = df[~(df['Topper CGPA'] <= 5.5)]
l1 = len(df)
print(l-l1, 'outliers removed.')
# df.reset_index(inplace=True)
26 outliers removed.
In [20]:
mean_greA = df['greA'].mean()
print(mean_greA)
df['greA'] = df['greA'].apply(lambda x: mean_greA if x == 0 else x)
df
4.860474231792887
Out[20]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401

49596 rows × 16 columns

In [21]:
mean_greQ = df['greQ'].mean()
print(mean_greQ)
df['greQ'] = df['greQ'].apply(lambda x: mean_greQ if x == 0 else x)
df
419.52859101540446
Out[21]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401

49596 rows × 16 columns

In [22]:
mean_greV = df['greV'].mean()
print(mean_greV)
df['greV'] = df['greV'].apply(lambda x: mean_greV if x == 0 else x)
df
321.99352770384706
Out[22]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label
0 New Jersey Institute of Technology MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
1 University of Southern California MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
2 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
3 Worcester Polytechnic Institute MS 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0
4 Arizona State University MS 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401
51320 University of Pennsylvania MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51321 University of Southern California MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401
51322 University of Southern California MS 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51323 University of Texas Austin MS 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401

49596 rows × 16 columns

In [23]:
def func0(program):
    if program.upper() == 'MS':
        return 0
    elif program.upper() == 'PHD':
        return 1
    else:
        return 2
df['Program'] = df['Program'].apply(func0)
df
Out[23]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label
0 New Jersey Institute of Technology 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
1 University of Southern California 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
2 Worcester Polytechnic Institute 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0
3 Worcester Polytechnic Institute 0 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0
4 Arizona State University 0 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401
51320 University of Pennsylvania 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51321 University of Southern California 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401
51322 University of Southern California 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401
51323 University of Texas Austin 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401

49596 rows × 16 columns

In [24]:
# def func1(name):
#     name = name.upper()
#     if name == 'IIT':
#         return 0
#     elif name == 'IIIT':
#         return 1
#     elif name == 'NIT':
#         return 2
#     else:
#         return 3
# df['ugCollege'] = df['ugCollege'].apply(func1)
# df
In [25]:
univ=['Carnegie Mellon University',
       'University of North Carolina Chapel Hill',
       'University of Illinois Urbana-Champaign',
       'University of California San Diego',
       'University of Minnesota Twin Cities',
       'Texas A and M University College Station',
       'Georgia Institute of Technology', 'University of Texas Austin',
       'University of Michigan Ann Arbor', 'Columbia University',
       'University of Maryland College Park', 'Arizona State University',
       'University of Cincinnati', 'Ohio State University Columbus',
       'North Carolina State University', 'Northeastern University',
       'University of Arizona', 'University of Wisconsin Madison',
       'SUNY Buffalo', 'Clemson University', 'University of Utah',
       'Rutgers University New Brunswick/Piscataway',
       'Virginia Polytechnic Institute and State University',
       'Stanford University', 'Massachusetts Institute of Technology',
       'California Institute of Technology',
       'University of Massachusetts Amherst',
       'University of California Irvine', 'Purdue University',
       'Cornell University', 'University of Florida',
       'University of Washington', 'Syracuse University',
       'University of Pennsylvania', 'University of Southern California',
       'University of Texas Dallas', 'University of Illinois Chicago',
       'George Mason University', 'Harvard University',
       'Johns Hopkins University', 'SUNY Stony Brook',
       'Northwestern University', 'New York University',
       'New Jersey Institute of Technology',
       'University of California Santa Barbara', 'Princeton University',
       'University of Colorado Boulder',
       'University of California Los Angeles',
       'University of North Carolina Charlotte',
       'University of Texas Arlington', 'University of California Davis',
       'Worcester Polytechnic Institute',
       'University of California Santa Cruz', 'Wayne State University']
ranks = [48,90,75,45,156,189,72,65,21,18,136,215,561,101,285,344,262,56,340,701,353,262,327,2,1,5,305,219,111,14,167,68,581,15,129,501,231,801,3,24,359,31,39,751,135,13,206,35,90,301,104,601,367,484]
print(len(univ), len(ranks))
univdict = {univ[i]: ranks[i] for i in range(len(univ))} 
print(univdict)
54 54
{'Carnegie Mellon University': 48, 'University of North Carolina Chapel Hill': 90, 'University of Illinois Urbana-Champaign': 75, 'University of California San Diego': 45, 'University of Minnesota Twin Cities': 156, 'Texas A and M University College Station': 189, 'Georgia Institute of Technology': 72, 'University of Texas Austin': 65, 'University of Michigan Ann Arbor': 21, 'Columbia University': 18, 'University of Maryland College Park': 136, 'Arizona State University': 215, 'University of Cincinnati': 561, 'Ohio State University Columbus': 101, 'North Carolina State University': 285, 'Northeastern University': 344, 'University of Arizona': 262, 'University of Wisconsin Madison': 56, 'SUNY Buffalo': 340, 'Clemson University': 701, 'University of Utah': 353, 'Rutgers University New Brunswick/Piscataway': 262, 'Virginia Polytechnic Institute and State University': 327, 'Stanford University': 2, 'Massachusetts Institute of Technology': 1, 'California Institute of Technology': 5, 'University of Massachusetts Amherst': 305, 'University of California Irvine': 219, 'Purdue University': 111, 'Cornell University': 14, 'University of Florida': 167, 'University of Washington': 68, 'Syracuse University': 581, 'University of Pennsylvania': 15, 'University of Southern California': 129, 'University of Texas Dallas': 501, 'University of Illinois Chicago': 231, 'George Mason University': 801, 'Harvard University': 3, 'Johns Hopkins University': 24, 'SUNY Stony Brook': 359, 'Northwestern University': 31, 'New York University': 39, 'New Jersey Institute of Technology': 751, 'University of California Santa Barbara': 135, 'Princeton University': 13, 'University of Colorado Boulder': 206, 'University of California Los Angeles': 35, 'University of North Carolina Charlotte': 90, 'University of Texas Arlington': 301, 'University of California Davis': 104, 'Worcester Polytechnic Institute': 601, 'University of California Santa Cruz': 367, 'Wayne State University': 484}
In [26]:
ranking = []
# uniqueUnivs = list(df['University Name'].unique())
# print((uniqueUnivs))
for index, row in df.iterrows():
    # i = uniqueUnivs.index(row['University Name'])
    # print(row['University Name'])
    ranking.append(univdict[row['University Name']])
print(len(ranking), len(ranks))
df['ranking'] = ranking
df
49596 54
Out[26]:
University Name Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label ranking
0 New Jersey Institute of Technology 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 751
1 University of Southern California 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 129
2 Worcester Polytechnic Institute 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 601
3 Worcester Polytechnic Institute 0 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0 601
4 Arizona State University 0 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1 215
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 Texas A and M University College Station 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401 189
51320 University of Pennsylvania 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 15
51321 University of Southern California 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 129
51322 University of Southern California 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 129
51323 University of Texas Austin 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 65

49596 rows × 17 columns

In [27]:
df = df.drop(columns='University Name')
df
Out[27]:
Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label ranking
0 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 751
1 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 129
2 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 601
3 0 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0 601
4 0 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1 215
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
51319 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401 189
51320 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 15
51321 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 129
51322 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 129
51323 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 65

49596 rows × 16 columns

In [28]:
df.reset_index(inplace=True)
df = df.drop(columns=['index'])
df
Out[28]:
Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label ranking
0 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 751
1 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 129
2 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 601
3 0 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0 601
4 0 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1 215
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
49591 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401 189
49592 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 15
49593 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 129
49594 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 129
49595 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 65

49596 rows × 16 columns

In [29]:
def func2(x):
    if x > 0 and x < 101:
        return 0
    elif x > 100 and x < 251:
        return 1
    elif x > 250 and x < 401:
        return 2
    else:
        return 3

flag = 0
if flag == 1:
    df["rank"] = df["ranking"].apply(func2)
df
Out[29]:
Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA Admission UG label ranking
0 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 751
1 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 129
2 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 1 0 601
3 0 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 0 601
4 0 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 1 215
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
49591 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 0 1401 189
49592 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 15
49593 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 129
49594 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1 1401 129
49595 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1 1401 65

49596 rows × 16 columns

In [30]:
df_y = df['Admission']

# To check whether a column has very uneven distribution of class of y (0 or 1)
for i in range(len(df.columns)):
    plt.figure(i, figsize=(25,2))
    plt.title(df.columns[i] + ' vs admission chances')
    plt.scatter(df.iloc[:, i], df_y, c='blue', label=df.columns[i], alpha=0.5)
    plt.xlabel(df.columns[i] + ' (x)')
    plt.ylabel('Rate of admission (y)')
    plt.legend()
    plt.grid(True)
    plt.show()
In [31]:
df['z'] = np.abs(stats.zscore(df['greQ']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_greQ.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 2)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
70 outliers removed.
In [32]:
df['z'] = np.abs(stats.zscore(df['greV']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_greV.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 2)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
231 outliers removed.
In [33]:
df['z'] = np.abs(stats.zscore(df['greA']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_greA.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 0.1)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
91 outliers removed.
In [34]:
df['z'] = np.abs(stats.zscore(df['TOEFL Essay']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_TOEFL Essay.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 2.5)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
2 outliers removed.
In [35]:
df['z'] = np.abs(stats.zscore(df['TOEFL Score']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_TOEFL Score.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 4.6)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
73 outliers removed.
In [36]:
df['z'] = np.abs(stats.zscore(df['Journal Pubs']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_Journal Pubs.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 4)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
402 outliers removed.
In [37]:
df['z'] = np.abs(stats.zscore(df['Intern Exp']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_Intern Exp.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 3)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
706 outliers removed.
In [38]:
df['z'] = np.abs(stats.zscore(df['Industry Exp']))
# print(df['z'].describe())
df['z'].to_csv(r'test/test_Industry Exp.csv')

# Outlier Removal
l = len(df)
df = df[~(df['z'] >= 4.2)]
l1 = len(df)
print(l-l1, 'outliers removed.')
df = df.drop(columns='z')
C:\Users\tanis_vt1gg0x\Anaconda3\lib\site-packages\ipykernel_launcher.py:3: FutureWarning: The signature of `Series.to_csv` was aligned to that of `DataFrame.to_csv`, and argument 'header' will change its default value from False to True: please pass an explicit value to suppress this warning.
  This is separate from the ipykernel package so we can avoid doing imports until
502 outliers removed.
In [39]:
df_y = df['Admission']
df = df.drop(columns='Admission')
In [42]:
# To check whether a column has very uneven distribution of class of y (0 or 1) AGAIN!
for i in range(len(df.columns)):
    plt.figure(i, figsize=(25,2))
    plt.title(df.columns[i] + ' vs Admission Chance')
    plt.scatter(df.iloc[:, i], df_y, c='blue', label=df.columns[i], alpha=0.5)
    plt.xlabel(df.columns[i] + ' (x)')
    plt.ylabel('Rate of admission (y)')
    plt.legend()
    plt.grid(True)
    plt.show()
In [43]:
df.to_csv(r'dataframe_preprocessed.csv')
df
Out[43]:
Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA UG label ranking
0 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 0 751
1 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 0 129
2 0 0 0 0.0 0.0 0.0 101.0 0.0 430.0 740.0 4.5 7.227 7.227 0 601
3 0 0 0 0.0 0.0 0.0 91.0 0.0 144.0 157.0 3.0 7.300 5.611 0 601
4 0 0 0 0.0 0.0 0.0 0.0 0.0 149.0 157.0 3.0 8.500 7.710 1 215
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
49591 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1401 189
49592 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1401 15
49593 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1401 129
49594 0 0 0 0.0 0.0 0.0 112.0 0.0 154.0 169.0 3.0 9.000 8.700 1401 129
49595 0 0 0 0.0 0.0 0.0 102.0 0.0 680.0 800.0 3.0 9.275 9.200 1401 65

47519 rows × 15 columns

In [44]:
df['Admission'] = df_y

print(df.shape)
fig, ax = plt.subplots(figsize=(9, 5))
plt.title('Correlation between different features', fontsize=8)
ax.title.set_position([0.5, 1.05])
# ax.axis('off')
seaborn.heatmap(df.corr(), ax=ax, annot=True, linewidths=0.05, fmt='.2f',cmap="magma")
plt.show()
(47519, 16)
In [45]:
df = shuffle(df)
df.reset_index(inplace=True)
df_y = df['Admission']
df = df.drop(columns=['index', 'Admission'])
df
Out[45]:
Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA UG label ranking
0 0 9 24 3.0 0.0 1.0 114.0 0.0 157.0 164.0 4.0 9.50 8.860 734 72
1 0 0 0 0.0 0.0 0.0 104.0 0.0 152.0 162.0 2.5 8.80 7.760 469 167
2 0 0 0 0.0 0.0 0.0 111.0 0.0 153.0 163.0 4.0 9.00 8.417 899 129
3 0 0 0 0.0 0.0 0.0 104.0 0.0 510.0 730.0 4.0 8.50 7.200 469 344
4 0 0 0 0.0 0.0 0.0 0.0 0.0 139.0 161.0 3.0 9.70 8.000 1039 501
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
47514 0 0 0 0.0 0.0 0.0 0.0 0.0 510.0 740.0 3.0 7.60 6.212 662 35
47515 0 0 0 0.0 0.0 0.0 98.0 27.0 152.0 165.0 4.0 9.43 9.110 907 129
47516 0 0 0 0.0 0.0 0.0 95.0 0.0 145.0 164.0 3.0 9.10 7.560 47 484
47517 0 0 0 0.0 0.0 0.0 110.0 0.0 620.0 780.0 3.5 8.30 7.970 228 167
47518 0 0 0 0.0 0.0 0.0 99.0 24.0 151.0 167.0 3.0 9.00 6.500 501 215

47519 rows × 15 columns

In [47]:
df.replace([np.inf, -np.inf], np.nan).dropna()
Out[47]:
Program Research Exp Industry Exp Intern Exp Journal Pubs ConfPubs TOEFL Score TOEFL Essay greV greQ greA Topper CGPA CGPA UG label ranking
0 0 9 24 3.0 0.0 1.0 114.0 0.0 157.0 164.0 4.0 9.50 8.860 734 72
1 0 0 0 0.0 0.0 0.0 104.0 0.0 152.0 162.0 2.5 8.80 7.760 469 167
2 0 0 0 0.0 0.0 0.0 111.0 0.0 153.0 163.0 4.0 9.00 8.417 899 129
3 0 0 0 0.0 0.0 0.0 104.0 0.0 510.0 730.0 4.0 8.50 7.200 469 344
4 0 0 0 0.0 0.0 0.0 0.0 0.0 139.0 161.0 3.0 9.70 8.000 1039 501
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
47514 0 0 0 0.0 0.0 0.0 0.0 0.0 510.0 740.0 3.0 7.60 6.212 662 35
47515 0 0 0 0.0 0.0 0.0 98.0 27.0 152.0 165.0 4.0 9.43 9.110 907 129
47516 0 0 0 0.0 0.0 0.0 95.0 0.0 145.0 164.0 3.0 9.10 7.560 47 484
47517 0 0 0 0.0 0.0 0.0 110.0 0.0 620.0 780.0 3.5 8.30 7.970 228 167
47518 0 0 0 0.0 0.0 0.0 99.0 24.0 151.0 167.0 3.0 9.00 6.500 501 215

47519 rows × 15 columns

In [48]:
l = len(df)

dfa = np.array(df[:(l//5)])
dfb = np.array(df[(l//5):(l*2)//5])
dfc = np.array(df[((2*l)//5):(l*3)//5])
dfd = np.array(df[((3*l)//5):(l*4)//5])
dfe = np.array(df[((4*l)//5):])

df_ya = np.array(df_y[:(l//5)])
df_yb = np.array(df_y[(l//5):(l*2)//5])
df_yc = np.array(df_y[((2*l)//5):(l*3)//5])
df_yd = np.array(df_y[((3*l)//5):(l*4)//5])
df_ye = np.array(df_y[((4*l)//5):])

dfbcde = np.concatenate((dfb,dfc,dfd,dfe))
dfacde = np.concatenate((dfa,dfc,dfd,dfe))
dfabde = np.concatenate((dfa,dfb,dfd,dfe))
dfabce = np.concatenate((dfa,dfb,dfc,dfe))
dfabcd = np.concatenate((dfa,dfb,dfc,dfd))

df_ybcde = np.concatenate((df_yb,df_yc,df_yd,df_ye))
df_yacde = np.concatenate((df_ya,df_yc,df_yd,df_ye))
df_yabde = np.concatenate((df_ya,df_yb,df_yd,df_ye))
df_yabce = np.concatenate((df_ya,df_yb,df_yc,df_ye))
df_yabcd = np.concatenate((df_ya,df_yb,df_yc,df_yd))

df_n = [dfbcde, dfacde, dfabde, dfabce, dfabcd]
df_y_n = [df_ybcde, df_yacde, df_yabde, df_yabce, df_yabcd]
In [49]:
print(len(dfa), len(dfb), len(dfc), len(dfd), len(dfe))
print(len(df_ya), len(df_yb), len(df_yc), len(df_yd), len(df_ye))
print(len(dfbcde), len(dfacde), len(dfabde), len(dfabce), len(dfabcd))
print(len(df_ybcde), len(df_yacde), len(df_yabde), len(df_yabce), len(df_yabcd))
9503 9504 9504 9504 9504
9503 9504 9504 9504 9504
38016 38015 38015 38015 38015
38016 38015 38015 38015 38015
In [53]:
# 5 folds
n_folds = 5

flag = 0
if flag == 1:
    
    knn1 = [KNeighborsClassifier(2)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        knn1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('knn1' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/knn1-'+str(n_folds), 'wb') as file:
            pickle.dump(knn1, file)
    
    
    knn2 = [KNeighborsClassifier(3)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        knn2[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('knn2' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/knn2-'+str(n_folds), 'wb') as file:
            pickle.dump(knn2, file)
    
    
    rfc1 = [RandomForestClassifier(n_estimators=10)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        rfc1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('rfc1'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/rfc1-'+str(n_folds), 'wb') as file:
            pickle.dump(rfc1, file)
    
    
    rfc2 = [RandomForestClassifier(n_estimators=15)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        rfc2[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('rfc2' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/rfc2-'+str(n_folds), 'wb') as file:
            pickle.dump(rfc2, file)
    
    
    rfc3 = [RandomForestClassifier(n_estimators=1000)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        rfc3[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('rfc3' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/rfc3-'+str(n_folds), 'wb') as file:
            pickle.dump(rfc3, file)
    
    
    mlp1 = [MLPClassifier(alpha=0.5, max_iter=1000)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        mlp1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('mlp1' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/mlp1-'+str(n_folds), 'wb') as file:
            pickle.dump(mlp1, file)
    
    
    mlp2 = [MLPClassifier(alpha=0.5, max_iter=2000)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        mlp2[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('mlp2' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/mlp2-'+str(n_folds), 'wb') as file:
            pickle.dump(mlp2, file)
    
    
    logr1 = [LogisticRegression(penalty='l1',tol=0.01)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        logr1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('logr1'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/logr1-'+str(n_folds), 'wb') as file:
            pickle.dump(logr1, file)

    
    logr2 = [LogisticRegression(penalty='l2',tol=0.01)]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        logr2[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('logr2' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/logr2-'+str(n_folds), 'wb') as file:
            pickle.dump(logr2, file)


    bnb1 = [BernoulliNB()]*(n_folds)
    for i in range(n_folds):    
        start = time.time()
        bnb1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('bnb1'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/bnb1-'+str(n_folds), 'wb') as file:
        pickle.dump(bnb1, file)
    
    
    cnb1 = [ComplementNB()]*(n_folds)
    for i in range(n_folds):    
        start = time.time()
        cnb1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('cnb1'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/cnb1-'+str(n_folds), 'wb') as file:
        pickle.dump(cnb1, file)

        
    gnb1 = [GaussianNB()]*(n_folds)
    for i in range(n_folds):    
        start = time.time()
        gnb1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('gnb1'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/gnb1-'+str(n_folds), 'wb') as file:
        pickle.dump(gnb1, file)
        

    mnb1 = [MultinomialNB()]*(n_folds)
    for i in range(n_folds):    
        start = time.time()
        mnb1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('mnb1'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/mnb1-'+str(n_folds), 'wb') as file:
        pickle.dump(mnb1, file)


    svm1 = [svm.SVC(kernel='poly', degree=1)]*(n_folds)
    for i in range(n_folds):    
        start = time.time()
        svm1[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('svm1' + str(i+1) + ' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/svm1-'+str(n_folds), 'wb') as file:
        pickle.dump(svm1, file)
    
    
#     svm2 = [svm.SVC(kernel='poly', degree=2)]*(n_folds)
#     for i in range(n_folds):
#         start = time.time()
#         svm2[i].fit(df_n[i],df_y_n[i])
#         end = time.time()
#         print('svm2'+str(i+1)+' - Total Time: %.4f s' % (end-start))
#     with open('pickle-files/svm2-'+str(n_folds), 'wb') as file:
#             pickle.dump(svm2, file)
    
    
#     svm3 = [svm.SVC(kernel='poly', degree=3)]*(n_folds)
#     for i in range(n_folds):
#         start = time.time()
#         svm3[i].fit(df_n[i],df_y_n[i])
#         end = time.time()
#         print('svm3'+str(n_folds)+' - Total Time: %.4f s' % (end-start))
#     with open('pickle-files/svm3-'+str(n_folds), 'wb') as file:
#             pickle.dump(svm3, file)


#     svm4 = [svm.SVC(kernel='poly', degree=4)]*(n_folds)
#     for i in range(n_folds):
#         start = time.time()
#         svm4[i].fit(df_n[i],df_y_n[i])
#         end = time.time()
#         print('svm4'+str(n_folds)+' - Total Time: %.4f s' % (end-start))
#     with open('pickle-files/svm4-'+str(n_folds), 'wb') as file:
#             pickle.dump(svm4, file)
    
    
    svm5 = [svm.SVC(kernel='rbf', gamma='scale')]*(n_folds)
    for i in range(n_folds):    
        start = time.time()
        svm5[i].fit(df_n[i],df_y_n[i])
        end = time.time()
        print('svm5'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/svm5-'+str(n_folds), 'wb') as file:
            pickle.dump(svm5, file)
    
    
    svm6 = [svm.SVC(kernel='linear')]*(n_folds)
    for i in range(n_folds):
        start = time.time()
        svm6[i].fit(df_n[i],df_y_n[i]) 
        end = time.time()
        print('svm6'+str(i+1)+' - Total Time: %.4f s' % (end-start))
    with open('pickle-files/svm6-'+str(n_folds), 'wb') as file:
        pickle.dump(svm6, file) 

elif flag == 0:
    with open('pickle-files/svm1-'+str(n_folds), 'rb') as file:
        svm1 = pickle.load(file)    
#     with open('pickle-files/svm2-'+str(n_folds), 'rb') as file:
#         svm2 = pickle.load(file)
#     with open('pickle-files/svm3-'+str(n_folds), 'rb') as file:
#         svm3 = pickle.load(file)
#     with open('pickle-files/svm4-'+str(n_folds), 'rb') as file:
#         svm4 = pickle.load(file)
    with open('pickle-files/svm5-'+str(n_folds), 'rb') as file:
        svm5 = pickle.load(file)
    with open('pickle-files/svm6-'+str(n_folds), 'rb') as file:
        svm6 = pickle.load(file)
    with open('pickle-files/knn1-'+str(n_folds), 'rb') as file:
        knn1 = pickle.load(file)
    with open('pickle-files/knn2-'+str(n_folds), 'rb') as file:
        knn2 = pickle.load(file)
    with open('pickle-files/rfc1-'+str(n_folds), 'rb') as file:
        rfc1 = pickle.load(file)
    with open('pickle-files/rfc2-'+str(n_folds), 'rb') as file:
        rfc2 = pickle.load(file)
    with open('pickle-files/rfc3-'+str(n_folds), 'rb') as file:
        rfc3 = pickle.load(file)
    with open('pickle-files/mlp1-'+str(n_folds), 'rb') as file:
        mlp1 = pickle.load(file)
    with open('pickle-files/mlp2-'+str(n_folds), 'rb') as file:
        mlp2 = pickle.load(file)
    with open('pickle-files/logr1-'+str(n_folds), 'rb') as file:
        logr1 = pickle.load(file)
    with open('pickle-files/logr2-'+str(n_folds), 'rb') as file:
        logr2 = pickle.load(file)
    with open('pickle-files/bnb1-'+str(n_folds), 'rb') as file:
        bnb1 = pickle.load(file)
    with open('pickle-files/cnb1-'+str(n_folds), 'rb') as file:
        cnb1 = pickle.load(file)
    with open('pickle-files/gnb1-'+str(n_folds), 'rb') as file:
        gnb1 = pickle.load(file)
    with open('pickle-files/mnb1-'+str(n_folds), 'rb') as file:
        mnb1 = pickle.load(file)
In [57]:
# Testing Accuracy:
        
svm1_predy = []
svm2_predy = []
svm3_predy = []
svm4_predy = []
svm5_predy = []
svm6_predy = []
knn1_predy = []
knn2_predy = []
rfc1_predy = []
rfc2_predy = []
rfc3_predy = []
mlp1_predy = []
mlp2_predy = []
logr1_predy = []
logr2_predy = []
bnb1_predy = []
cnb1_predy = []
gnb1_predy = []
mnb1_predy = []
        
for i,a in zip(range(n_folds), [dfa, dfb, dfc, dfd, dfe]):
    svm1_predy += [svm1[i].predict(a)]
#     svm2_predy += [svm2[i].predict(a)]
#     svm3_predy += [svm3[i].predict(a)]
#     svm4_predy += [svm4[i].predict(a)]
    svm5_predy += [svm5[i].predict(a)]
    svm6_predy += [svm6[i].predict(a)]
    knn1_predy += [knn1[i].predict(a)]
    knn2_predy += [knn2[i].predict(a)]
    rfc1_predy += [rfc1[i].predict(a)]
    rfc2_predy += [rfc2[i].predict(a)]
    rfc3_predy += [rfc3[i].predict(a)]
    mlp1_predy += [mlp1[i].predict(a)]
    mlp2_predy += [mlp2[i].predict(a)]
    logr1_predy += [logr1[i].predict(a)]
    logr2_predy += [logr2[i].predict(a)]
    bnb1_predy += [bnb1[i].predict(a)]
    cnb1_predy += [cnb1[i].predict(a)]
    gnb1_predy += [gnb1[i].predict(a)]
    mnb1_predy += [mnb1[i].predict(a)]

svm1_acc = []
svm2_acc = []
svm3_acc = []
svm4_acc = []
svm5_acc = []
svm6_acc = []
knn1_acc = []
knn2_acc = []
rfc1_acc = []
rfc2_acc = []
rfc3_acc = []
mlp1_acc = []
mlp2_acc = []
logr1_acc = []
logr2_acc = []
bnb1_acc = []
cnb1_acc = []
gnb1_acc = []
mnb1_acc = []

for i,fold in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    svm1_acc += [accuracy_score(fold, svm1_predy[i])]
#     svm2_acc += [accuracy_score(fold, svm2_predy[i])]
#     svm3_acc += [accuracy_score(fold, svm3_predy[i])]
#     svm4_acc += [accuracy_score(fold, svm4_predy[i])]
    svm5_acc += [accuracy_score(fold, svm5_predy[i])]
    svm6_acc += [accuracy_score(fold, svm6_predy[i])]
    knn1_acc += [accuracy_score(fold, knn1_predy[i])]
    knn2_acc += [accuracy_score(fold, knn2_predy[i])]
    rfc1_acc += [accuracy_score(fold, rfc1_predy[i])]
    rfc2_acc += [accuracy_score(fold, rfc2_predy[i])]
    rfc3_acc += [accuracy_score(fold, rfc3_predy[i])]
    mlp1_acc += [accuracy_score(fold, mlp1_predy[i])]
    mlp2_acc += [accuracy_score(fold, mlp2_predy[i])]
    logr1_acc += [accuracy_score(fold, logr1_predy[i])]
    logr2_acc += [accuracy_score(fold, logr2_predy[i])]
    bnb1_acc += [accuracy_score(fold, bnb1_predy[i])]
    cnb1_acc += [accuracy_score(fold, cnb1_predy[i])]
    gnb1_acc += [accuracy_score(fold, gnb1_predy[i])]
    mnb1_acc += [accuracy_score(fold, mnb1_predy[i])]
    
    print('SVM1'+str(i+1), svm1_acc[i])
#     print('SVM2'+str(i+1), svm2_acc[i])
#     print('SVM3'+str(i+1), svm3_acc[i])
#     print('SVM4'+str(i+1), svm4_acc[i])
    print('SVM5'+str(i+1), svm5_acc[i])
    print('SVM6'+str(i+1), svm6_acc[i])
    print('KNN1'+str(i+1), knn1_acc[i])
    print('KNN2'+str(i+1), knn2_acc[i])
    print('RFC1'+str(i+1), rfc1_acc[i])
    print('RFC2'+str(i+1), rfc2_acc[i])
    print('RFC3'+str(i+1), rfc3_acc[i])
    print('MLP1'+str(i+1), mlp1_acc[i])
    print('MLP2'+str(i+1), mlp2_acc[i])
    print('LOGR1'+str(i+1), logr1_acc[i])
    print('LOGR2'+str(i+1), logr2_acc[i])
    print('BNB1'+str(i+1), bnb1_acc[i])
    print('CNB1'+str(i+1), cnb1_acc[i])
    print('GNB1'+str(i+1), gnb1_acc[i])
    print('MNB1'+str(i+1), mnb1_acc[i])
SVM11 0.6074923708302641
SVM51 0.5884457539724297
SVM61 0.6066505314111333
KNN11 0.7918552036199095
KNN21 0.7938545722403452
RFC11 0.9833736714721667
RFC21 0.993686204356519
RFC31 0.9998947700726086
MLP11 0.5698200568241608
MLP21 0.5466694727980638
LOGR11 0.6091760496685257
LOGR21 0.573503104282858
BNB11 0.5301483741976218
CNB11 0.5638219509628538
GNB11 0.5382510786067558
MNB11 0.5636114911080712
SVM12 0.609006734006734
SVM52 0.5843855218855218
SVM62 0.6063762626262627
KNN12 0.79503367003367
KNN22 0.7896675084175084
RFC12 0.9835858585858586
RFC22 0.9923190235690236
RFC32 0.9997895622895623
MLP12 0.5550294612794613
MLP22 0.5629208754208754
LOGR12 0.60753367003367
LOGR22 0.5817550505050505
BNB12 0.5316708754208754
CNB12 0.5674452861952862
GNB12 0.5360900673400674
MNB12 0.5674452861952862
SVM13 0.6021675084175084
SVM53 0.5823863636363636
SVM63 0.6060606060606061
KNN13 0.7908249158249159
KNN23 0.7891414141414141
RFC13 0.9816919191919192
RFC23 0.9943181818181818
RFC33 1.0
MLP13 0.5577651515151515
MLP23 0.5526094276094277
LOGR13 0.6035353535353535
LOGR23 0.577020202020202
BNB13 0.5242003367003367
CNB13 0.5669191919191919
GNB13 0.5300925925925926
MNB13 0.5664983164983165
SVM14 0.6095328282828283
SVM54 0.5873316498316499
SVM64 0.60753367003367
KNN14 0.7920875420875421
KNN24 0.7911405723905723
RFC14 0.9826388888888888
RFC24 0.9938973063973064
RFC34 0.9998947811447811
MLP14 0.5566077441077442
MLP24 0.5507154882154882
LOGR14 0.6065867003367004
LOGR24 0.5785984848484849
BNB14 0.5240951178451179
CNB14 0.5634469696969697
GNB14 0.5325126262626263
MNB14 0.5634469696969697
SVM15 0.6098484848484849
SVM55 0.5874368686868687
SVM65 0.6055345117845118
KNN15 0.5608164983164983
KNN25 0.5816498316498316
RFC15 0.6273148148148148
RFC25 0.6419402356902357
RFC35 0.648989898989899
MLP15 0.5585016835016835
MLP25 0.5441919191919192
LOGR15 0.6113215488215489
LOGR25 0.5793350168350169
BNB15 0.5352483164983165
CNB15 0.5688131313131313
GNB15 0.5441919191919192
MNB15 0.5691287878787878
In [58]:
arr = [svm1_predy, svm2_predy, svm3_predy, svm4_predy, svm5_predy, svm6_predy, knn1_predy, knn2_predy, rfc1_predy, rfc2_predy, rfc3_predy, mlp1_predy, mlp2_predy, logr1_predy, logr2_predy, bnb1_predy, cnb1_predy, gnb1_predy, mnb1_predy]
with open('pickle-files/testing_predy', 'wb') as file:
    pickle.dump(arr, file)
In [59]:
# Training Accuracy:

svm1_predy_train = []
svm2_predy_train = []
svm3_predy_train = []
svm4_predy_train = []
svm5_predy_train = []
svm6_predy_train = []
knn1_predy_train = []
knn2_predy_train = []
rfc1_predy_train = []
rfc2_predy_train = []
rfc3_predy_train = []
mlp1_predy_train = []
mlp2_predy_train = []
logr1_predy_train = []
logr2_predy_train = []
bnb1_predy_train = []
cnb1_predy_train = []
gnb1_predy_train = []
mnb1_predy_train = []

for i in range(n_folds):
    svm1_predy_train += [svm1[i].predict(df_n[i])]
#     svm2_predy_train += [svm2[i].predict(df_n[i])]
#     svm3_predy_train += [svm3[i].predict(df_n[i])]
#     svm4_predy_train += [svm4[i].predict(df_n[i])]
    svm5_predy_train += [svm5[i].predict(df_n[i])]
    svm6_predy_train += [svm6[i].predict(df_n[i])]
    knn1_predy_train += [knn1[i].predict(df_n[i])]
    knn2_predy_train += [knn2[i].predict(df_n[i])]
    rfc1_predy_train += [rfc1[i].predict(df_n[i])]
    rfc2_predy_train += [rfc2[i].predict(df_n[i])]
    rfc3_predy_train += [rfc3[i].predict(df_n[i])]
    mlp1_predy_train += [mlp1[i].predict(df_n[i])]
    mlp2_predy_train += [mlp2[i].predict(df_n[i])]
    logr1_predy_train += [logr1[i].predict(df_n[i])]
    logr2_predy_train += [logr2[i].predict(df_n[i])]
    bnb1_predy_train += [bnb1[i].predict(df_n[i])]
    cnb1_predy_train += [cnb1[i].predict(df_n[i])]
    gnb1_predy_train += [gnb1[i].predict(df_n[i])]
    mnb1_predy_train += [mnb1[i].predict(df_n[i])]

svm1_acc_train = []
svm2_acc_train = []
svm3_acc_train = []
svm4_acc_train = []
svm5_acc_train = []
svm6_acc_train = []
knn1_acc_train = []
knn2_acc_train = []
rfc1_acc_train = []
rfc2_acc_train = []
rfc3_acc_train = []
mlp1_acc_train = []
mlp2_acc_train = []
logr1_acc_train = []
logr2_acc_train = []
bnb1_acc_train = []
cnb1_acc_train = []
gnb1_acc_train = []
mnb1_acc_train = []

for i in range(n_folds):
    svm1_acc_train += [accuracy_score(df_y_n[i], svm1_predy_train[i])]
#     svm2_acc_train += [accuracy_score(df_y_n[i], svm2_predy_train[i])]
#     svm3_acc_train += [accuracy_score(df_y_n[i], svm3_predy_train[i])]
#     svm4_acc_train += [accuracy_score(df_y_n[i], svm4_predy_train[i])]
    svm5_acc_train += [accuracy_score(df_y_n[i], svm5_predy_train[i])]
    svm6_acc_train += [accuracy_score(df_y_n[i], svm6_predy_train[i])]
    knn1_acc_train += [accuracy_score(df_y_n[i], knn1_predy_train[i])]
    knn2_acc_train += [accuracy_score(df_y_n[i], knn2_predy_train[i])]
    rfc1_acc_train += [accuracy_score(df_y_n[i], rfc1_predy_train[i])]
    rfc2_acc_train += [accuracy_score(df_y_n[i], rfc2_predy_train[i])]
    rfc3_acc_train += [accuracy_score(df_y_n[i], rfc3_predy_train[i])]
    mlp1_acc_train += [accuracy_score(df_y_n[i], mlp1_predy_train[i])]
    mlp2_acc_train += [accuracy_score(df_y_n[i], mlp2_predy_train[i])]
    logr1_acc_train += [accuracy_score(df_y_n[i], logr1_predy_train[i])]
    logr2_acc_train += [accuracy_score(df_y_n[i], logr2_predy_train[i])]
    bnb1_acc_train += [accuracy_score(df_y_n[i], bnb1_predy_train[i])]
    cnb1_acc_train += [accuracy_score(df_y_n[i], cnb1_predy_train[i])]
    gnb1_acc_train += [accuracy_score(df_y_n[i], gnb1_predy_train[i])]
    mnb1_acc_train += [accuracy_score(df_y_n[i], mnb1_predy_train[i])]
    
    
    print('SVM1'+str(i+1), svm1_acc_train[i])
#     print('SVM2'+str(i+1), svm2_acc_train[i])
#     print('SVM3'+str(i+1), svm3_acc_train[i])
#     print('SVM4'+str(i+1), svm4_acc_train[i])
    print('SVM5'+str(i+1), svm5_acc_train[i])
    print('SVM6'+str(i+1), svm6_acc_train[i])
    print('KNN1'+str(i+1), knn1_acc_train[i])
    print('KNN2'+str(i+1), knn2_acc_train[i])
    print('RFC1'+str(i+1), rfc1_acc_train[i])
    print('RFC2'+str(i+1), rfc2_acc_train[i])
    print('RFC3'+str(i+1), rfc3_acc_train[i])
    print('MLP1'+str(i+1), mlp1_acc_train[i])
    print('MLP2'+str(i+1), mlp2_acc_train[i])
    print('LOGR1'+str(i+1), logr1_acc_train[i])
    print('LOGR2'+str(i+1), logr2_acc_train[i])
    print('BNB1'+str(i+1), bnb1_acc_train[i])
    print('CNB1'+str(i+1), cnb1_acc_train[i])
    print('GNB1'+str(i+1), gnb1_acc_train[i])
    print('MNB1'+str(i+1), mnb1_acc_train[i])
SVM11 0.6076388888888888
SVM51 0.585385101010101
SVM61 0.6063762626262627
KNN11 0.7346906565656566
KNN21 0.7378998316498316
RFC11 0.8938078703703703
RFC21 0.9056186868686869
RFC31 0.9121685606060606
MLP11 0.55697601010101
MLP21 0.5526094276094277
LOGR11 0.6072443181818182
LOGR21 0.5791771885521886
BNB11 0.5288036616161617
CNB11 0.5666561447811448
GNB11 0.5357218013468014
MNB11 0.5666298400673401
SVM12 0.607260291990004
SVM52 0.586400105221623
SVM62 0.6064448244114166
KNN12 0.7338945153228988
KNN22 0.7389451532289886
RFC12 0.89375246613179
RFC22 0.9059581744048402
RFC32 0.9121925555701696
MLP12 0.5606734183874786
MLP22 0.5485466263317111
LOGR12 0.6076548730764172
LOGR22 0.5771142969880311
BNB12 0.528422990924635
CNB12 0.5657503616993292
GNB12 0.5362620018413784
MNB12 0.5656714454820465
SVM13 0.6089701433644614
SVM53 0.5868999079310798
SVM63 0.6065237406286992
KNN13 0.7349467315533342
KNN23 0.7390766802577929
RFC13 0.894225963435486
RFC23 0.9054583716953833
RFC33 0.9121399447586479
MLP13 0.5599894778376956
MLP23 0.5511245560962778
LOGR13 0.6086544784953308
LOGR23 0.5782980402472708
BNB13 0.5302906747336578
CNB13 0.5658818887281336
GNB13 0.5377614099697487
MNB13 0.5659081941338945
SVM14 0.6071287649611995
SVM54 0.5856635538603183
SVM64 0.6061554649480468
KNN14 0.7346310666842036
KNN24 0.7385768775483362
RFC14 0.893989214783638
RFC24 0.905563593318427
RFC34 0.9121662501644088
MLP14 0.5602788373010654
MLP24 0.5515980533999737
LOGR14 0.6078916217282652
LOGR24 0.5779034591608575
BNB14 0.5303169801394186
CNB14 0.5667499671182428
GNB14 0.5371563856372484
MNB14 0.5666710509009601
SVM15 0.6070498487439169
SVM55 0.5856372484545574
SVM65 0.6066552676575037
KNN15 0.7924503485466263
KNN25 0.790950940418256
RFC15 0.9828225700381429
RFC25 0.9935551755885834
RFC35 0.9998947783769565
MLP15 0.5598053399973695
MLP25 0.5532289885571485
LOGR15 0.6067078784690254
LOGR25 0.5777193213205314
BNB15 0.5275286071287649
CNB15 0.5654083914244378
GNB15 0.5342364855977904
MNB15 0.5652505589898724
In [60]:
arr = [svm1_predy_train, svm2_predy_train, svm3_predy_train, svm4_predy_train, svm5_predy_train, svm6_predy_train, knn1_predy_train, knn2_predy_train, rfc1_predy_train, rfc2_predy_train, rfc3_predy_train, mlp1_predy_train, mlp2_predy_train, logr1_predy_train, logr2_predy_train, bnb1_predy_train, cnb1_predy_train, gnb1_predy_train, mnb1_predy_train]
with open('pickle-files/training_predy', 'wb') as file:
    pickle.dump(arr, file)
In [61]:
print("Average Training Accuracy SVM1:", sum(svm1_acc_train)/n_folds)
# print("Average Training Accuracy SVM2:", sum(svm2_acc_train)/n_folds)
# print("Average Training Accuracy SVM3:", sum(svm3_acc_train)/n_folds)
# print("Average Training Accuracy SVM4:", sum(svm4_acc_train)/n_folds)
print("Average Training Accuracy SVM5:", sum(svm5_acc_train)/n_folds)
print("Average Training Accuracy SVM6:", sum(svm6_acc_train)/n_folds)
print("Average Training Accuracy KNN1:", sum(knn1_acc_train)/n_folds)
print("Average Training Accuracy KNN2:", sum(knn2_acc_train)/n_folds)
print("Average Training Accuracy RFC1:", sum(rfc1_acc_train)/n_folds)
print("Average Training Accuracy RFC2:", sum(rfc2_acc_train)/n_folds)
print("Average Training Accuracy RFC3:", sum(rfc3_acc_train)/n_folds)
print("Average Training Accuracy MLP1:", sum(mlp1_acc_train)/n_folds)
print("Average Training Accuracy MLP2:", sum(mlp2_acc_train)/n_folds)
print("Average Training Accuracy LOGR1:", sum(logr1_acc_train)/n_folds)
print("Average Training Accuracy LOGR2:", sum(logr2_acc_train)/n_folds)
print("Average Training Accuracy BNB1:", sum(bnb1_acc_train)/n_folds)
print("Average Training Accuracy CNB1:", sum(cnb1_acc_train)/n_folds)
print("Average Training Accuracy GNB1:", sum(gnb1_acc_train)/n_folds)
print("Average Training Accuracy MNB1:", sum(mnb1_acc_train)/n_folds)

print("Average Testing Accuracy SVM1:", sum(svm1_acc)/n_folds)
# print("Average Testing Accuracy SVM2:", sum(svm2_acc)/n_folds)
# print("Average Testing Accuracy SVM3:", sum(svm3_acc)/n_folds)
# print("Average Testing Accuracy SVM4:", sum(svm4_acc)/n_folds)
print("Average Testing Accuracy SVM5:", sum(svm5_acc)/n_folds)
print("Average Testing Accuracy SVM6:", sum(svm6_acc)/n_folds)
print("Average Testing Accuracy KNN1:", sum(knn1_acc)/n_folds)
print("Average Testing Accuracy KNN2:", sum(knn2_acc)/n_folds)
print("Average Testing Accuracy RFC1:", sum(rfc1_acc)/n_folds)
print("Average Testing Accuracy RFC2:", sum(rfc2_acc)/n_folds)
print("Average Testing Accuracy RFC3:", sum(rfc3_acc)/n_folds)
print("Average Testing Accuracy MLP1:", sum(mlp1_acc)/n_folds)
print("Average Testing Accuracy MLP2:", sum(mlp2_acc)/n_folds)
print("Average Testing Accuracy LOGR1:", sum(logr1_acc)/n_folds)
print("Average Testing Accuracy LOGR2:", sum(logr2_acc)/n_folds)
print("Average Testing Accuracy BNB1:", sum(bnb1_acc)/n_folds)
print("Average Testing Accuracy CNB1:", sum(cnb1_acc)/n_folds)
print("Average Testing Accuracy GNB1:", sum(gnb1_acc)/n_folds)
print("Average Testing Accuracy MNB1:", sum(mnb1_acc)/n_folds)
Average Training Accuracy SVM1: 0.6076095875896941
Average Training Accuracy SVM5: 0.585997183295536
Average Training Accuracy SVM6: 0.6064311120543857
Average Training Accuracy KNN1: 0.746122663734544
Average Training Accuracy KNN2: 0.7490898966206411
Average Training Accuracy RFC1: 0.9117196169518854
Average Training Accuracy RFC2: 0.9232308003751841
Average Training Accuracy RFC3: 0.9297124178952487
Average Training Accuracy MLP1: 0.559544616724924
Average Training Accuracy MLP2: 0.5514215303989076
Average Training Accuracy LOGR1: 0.6076306339901715
Average Training Accuracy LOGR2: 0.5780424612537759
Average Training Accuracy BNB1: 0.5290725829085275
Average Training Accuracy CNB1: 0.5660893507502577
Average Training Accuracy GNB1: 0.5362276168785935
Average Training Accuracy MNB1: 0.5660262179148228
Average Testing Accuracy SVM1: 0.6076095852771639
Average Testing Accuracy SVM5: 0.5859972316025667
Average Testing Accuracy SVM6: 0.6064311163832368
Average Testing Accuracy KNN1: 0.7461235659765071
Average Testing Accuracy KNN2: 0.7490907797679343
Average Testing Accuracy RFC1: 0.9117210305907296
Average Testing Accuracy RFC2: 0.9232321903662534
Average Testing Accuracy RFC3: 0.9297138024993702
Average Testing Accuracy MLP1: 0.5595448194456403
Average Testing Accuracy MLP2: 0.5514214366471549
Average Testing Accuracy LOGR1: 0.6076306644791597
Average Testing Accuracy LOGR2: 0.5780423716983225
Average Testing Accuracy BNB1: 0.5290726041324537
Average Testing Accuracy CNB1: 0.5660893060174865
Average Testing Accuracy GNB1: 0.5362276567987923
Average Testing Accuracy MNB1: 0.5660261702754863
In [62]:
# F1 Scores

svm1_f1 = []
svm2_f1 = []
svm3_f1 = []
svm4_f1 = []
svm5_f1 = []
svm6_f1 = []
knn1_f1 = []
knn2_f1 = []
rfc1_f1 = []
rfc2_f1 = []
rfc3_f1 = []
mlp1_f1 = []
mlp2_f1 = []
logr1_f1 = []
logr2_f1 = []
bnb1_f1 = []
cnb1_f1 = []
gnb1_f1 = []
mnb1_f1 = []

for i,fold in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    svm1_f1 += [f1_score(fold, svm1_predy[i])]
#     svm2_f1 += [f1_score(fold, svm2_predy[i])]
#     svm3_f1 += [f1_score(fold, svm3_predy[i])]
#     svm4_f1 += [f1_score(fold, svm4_predy[i])]
    svm5_f1 += [f1_score(fold, svm5_predy[i])]
    svm6_f1 += [f1_score(fold, svm6_predy[i])]
    knn1_f1 += [f1_score(fold, knn1_predy[i])]
    knn2_f1 += [f1_score(fold, knn2_predy[i])]
    rfc1_f1 += [f1_score(fold, rfc1_predy[i])]
    rfc2_f1 += [f1_score(fold, rfc2_predy[i])]
    rfc3_f1 += [f1_score(fold, rfc3_predy[i])]
    mlp1_f1 += [f1_score(fold, mlp1_predy[i])]
    mlp2_f1 += [f1_score(fold, mlp2_predy[i])]
    logr1_f1 += [f1_score(fold, logr1_predy[i])]
    logr2_f1 += [f1_score(fold, logr2_predy[i])]
    bnb1_f1 += [f1_score(fold, bnb1_predy[i])]
    cnb1_f1 += [f1_score(fold, cnb1_predy[i])]
    gnb1_f1 += [f1_score(fold, gnb1_predy[i])]
    mnb1_f1 += [f1_score(fold, mnb1_predy[i])]
    
    print('SVM1'+str(i+1), svm1_f1[i])
#     print('SVM2'+str(i+1), svm2_f1[i])
#     print('SVM3'+str(i+1), svm3_f1[i])
#     print('SVM4'+str(i+1), svm4_f1[i])
    print('SVM5'+str(i+1), svm5_f1[i])
    print('SVM6'+str(i+1), svm6_f1[i])
    print('KNN1'+str(i+1), knn1_f1[i])
    print('KNN2'+str(i+1), knn2_f1[i])
    print('RFC1'+str(i+1), rfc1_f1[i])
    print('RFC2'+str(i+1), rfc2_f1[i])
    print('RFC3'+str(i+1), rfc3_f1[i])
    print('MLP1'+str(i+1), mlp1_f1[i])
    print('MLP2'+str(i+1), mlp1_f1[i])
    print('LOGR1'+str(i+1), logr1_f1[i])
    print('LOGR2'+str(i+1), logr2_f1[i])
    print('BNB1'+str(i+1), bnb1_f1[i])
    print('CNB1'+str(i+1), cnb1_f1[i])
    print('GNB1'+str(i+1), gnb1_f1[i])
    print('MNB1'+str(i+1), mnb1_f1[i])
SVM11 0.5993555316863587
SVM51 0.619367396593674
SVM61 0.6052798310454066
KNN11 0.7478327383987762
KNN21 0.8024205748865356
RFC11 0.983771569433032
RFC21 0.9938925081433225
RFC31 0.9998981773750127
MLP11 0.6183719193427931
MLP21 0.6183719193427931
LOGR11 0.6172712283594394
LOGR21 0.5863863659557098
BNB11 0.6487847085660348
CNB11 0.5497990659280982
GNB11 0.658627664540221
MNB11 0.5497774400173705
SVM12 0.5941459152468327
SVM52 0.6123650637880276
SVM62 0.5973522763965128
KNN12 0.7444240356861717
KNN22 0.793427715200992
RFC12 0.9835408313990294
RFC22 0.9923680083638264
RFC32 0.999791013584117
MLP12 0.5961226243911757
MLP22 0.5961226243911757
LOGR12 0.6081109476780837
LOGR22 0.5902484279971137
BNB12 0.6460437375745527
CNB12 0.5453942275793432
GNB12 0.6535166994106091
MNB12 0.5454947484798232
SVM13 0.5904906314307377
SVM53 0.6105387106270239
SVM63 0.5994865211810012
KNN13 0.7426870308050738
KNN23 0.7950501124974433
RFC13 0.9819126819126819
RFC23 0.9944364310735628
RFC33 1.0
MLP13 0.6014224751066857
MLP23 0.6014224751066857
LOGR13 0.6079900124843945
LOGR23 0.5871842267406039
BNB13 0.6403118040089087
CNB13 0.5497702909647779
GNB13 0.6503288443470091
MNB13 0.5495298491143669
SVM14 0.6010964205095131
SVM54 0.6163177460379574
SVM64 0.6072030328559395
KNN14 0.7435764339475733
KNN24 0.7941511977600332
RFC14 0.9827568188943463
RFC24 0.9940107393638993
RFC34 0.9998967262212124
MLP14 0.6016260162601627
MLP24 0.6016260162601627
LOGR14 0.6141781033949024
LOGR24 0.5906163753449862
BNB14 0.6430995028801388
CNB14 0.548186867036916
GNB14 0.6525920713112832
MNB14 0.5482852476864453
SVM15 0.5998273257068855
SVM55 0.6211960197082407
SVM65 0.6032384379299396
KNN15 0.456651913564176
KNN25 0.5946992864424057
RFC15 0.6093956771063079
RFC25 0.6468818096918129
RFC35 0.6544437538844002
MLP15 0.6060093896713615
MLP25 0.6060093896713615
LOGR15 0.6189395502372601
LOGR25 0.5953441295546559
BNB15 0.6535414542317045
CNB15 0.5585954330030158
GNB15 0.663350948088281
MNB15 0.5591559909570459
In [63]:
print('Average F1-Score SVM1:', sum(svm1_f1)/n_folds)
# print('Average F1-Score SVM2:', sum(svm2_f1)/n_folds)
# print('Average F1-Score SVM3:', sum(svm3_f1)/n_folds)
# print('Average F1-Score SVM4:', sum(svm4_f1)/n_folds)
print('Average F1-Score SVM5:', sum(svm5_f1)/n_folds)
print('Average F1-Score SVM6:', sum(svm6_f1)/n_folds)
print('Average F1-Score KNN1:', sum(knn1_f1)/n_folds)
print('Average F1-Score KNN2:', sum(knn2_f1)/n_folds)
print('Average F1-Score RFC1:', sum(rfc1_f1)/n_folds)
print('Average F1-Score RFC2:', sum(rfc2_f1)/n_folds)
print('Average F1-Score RFC3:', sum(rfc3_f1)/n_folds)
print('Average F1-Score MLP1:', sum(mlp1_f1)/n_folds)
print('Average F1-Score MLP2:', sum(mlp2_f1)/n_folds)
print('Average F1-Score LOGR1:', sum(logr1_f1)/n_folds)
print('Average F1-Score LOGR2:', sum(logr2_f1)/n_folds)
print('Average F1-Score BNB1:', sum(bnb1_f1)/n_folds)
print('Average F1-Score CNB1:', sum(cnb1_f1)/n_folds)
print('Average F1-Score GNB1:', sum(gnb1_f1)/n_folds)
print('Average F1-Score MNB1:', sum(mnb1_f1)/n_folds)
Average F1-Score SVM1: 0.5969831649160654
Average F1-Score SVM5: 0.6159569873509847
Average F1-Score SVM6: 0.60251201988176
Average F1-Score KNN1: 0.6870344304803542
Average F1-Score KNN2: 0.7559497773574819
Average F1-Score RFC1: 0.9082755157490794
Average F1-Score RFC2: 0.9243178993272847
Average F1-Score RFC3: 0.9308059342129484
Average F1-Score MLP1: 0.6047104849544358
Average F1-Score MLP2: 0.3469185394650039
Average F1-Score LOGR1: 0.613297968430816
Average F1-Score LOGR2: 0.5899559051186138
Average F1-Score BNB1: 0.6463562414522679
Average F1-Score CNB1: 0.5503491769024302
Average F1-Score GNB1: 0.6556832455394807
Average F1-Score MNB1: 0.5504486552510104
In [64]:
# estimators = []
# for i in range(n_folds):
# #     estimators += ('psvm', svm1[i])
# #     estimators += ('psvm', svm2[i])
# #     estimators += ('psvm', svm3[i])
# #     estimators += ('psvm', svm4[i])
#     estimators += ('rsvm', svm5[i])
# #     estimators += ('lsvm', svm6[i])
#     estimators += ('knn', knn1[i])
#     estimators += ('knn', knn2[i])
#     estimators += ('rf', rfc1[i])
#     estimators += ('rf', rfc2[i])
#     estimators += ('rf', rfc3[i])
#     estimators += ('mlp', mlp1[i])
#     estimators += ('mlp', mlp2[i])
#     estimators += ('lr', logr1[i])
#     estimators += ('lr', logr2[i])
#     estimators += ('bnb', bnb1[i])
#     estimators += ('cnb', cnb1[i])
#     estimators += ('gnb', gnb1[i])
#     estimators += ('mnb', mnb1[i])
# vclf1 = VotingClassifier(estimators=estimators,  voting='hard')
# vclf2 = VotingClassifier(estimators=estimators,  voting='soft')

# vclf1.fit()

# vclf1_pred = []
# vclf2_pred = []
# vclf1_pred_train = []
# vclf2_pred_train = []

# for i in range(n_folds):
#     vclf1_pred_train += [vclf1.predict(df_n[i])]
#     vclf2_pred_train += [vclf2.predict(df_n[i])]
# for i in [dfa, dfb, dfc, dfd, dfe]:
#     vclf1_pred += [vclf1.predict(i)]
#     vclf2_pred += [vclf2.predict(i)]
    
# for ind, i in zip(range(n_folds), [dfa, dfb, dfc, dfd, dfe]):
#     print('Hard Voting Classifier, Fold ' + str(i+1) + ' :', accuracy_score(fold, vclf1_pred[i]))
#     print('Soft Voting Classifier, Fold ' + str(i+1) + ' :', accuracy_score(fold, vclf2_pred[i]))
# for i in range(n_folds):
#     print('Hard Voting Classifier, Fold ' + str(i+1) + ' :', f1_score(df_yn[i], vclf1_pred_train[i]))
#     print('Soft Voting Classifier, Fold ' + str(i+1) + ' :', f1_score(df_yn[i], vclf2_pred_train[i]))
In [65]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(svm1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for poly 1 SVM for fold ' + str(i+1))
    plt.show()
In [66]:
# for i in range(n_folds):
#     fpr, tpr, _ = roc_curve(svm2_predy_train[i], df_y_n[i], drop_intermediate=False)
#     plt.plot(fpr, tpr, color='red')
#     plt.xlabel('fpr')
#     plt.ylabel('tpr')
#     plt.title('ROC curve for poly 2 SVM for fold ' + str(i+1))
#     plt.show()
In [67]:
# for i in range(n_folds):
#     fpr, tpr, _ = roc_curve(svm3_predy_train[i], df_y_n[i], drop_intermediate=False)
#     plt.plot(fpr, tpr, color='red')
#     plt.xlabel('fpr')
#     plt.ylabel('tpr')
#     plt.title('ROC curve for poly 3 SVM for fold ' + str(i+1))
#     plt.show()
In [68]:
# for i in range(n_folds):
#     fpr, tpr, _ = roc_curve(svm4_predy_train[i], df_y_n[i], drop_intermediate=False)
#     plt.plot(fpr, tpr, color='red')
#     plt.xlabel('fpr')
#     plt.ylabel('tpr')
#     plt.title('ROC curve for poly 4 SVM for fold ' + str(i+1))
#     plt.show()
In [69]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(svm5_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for RBF SVM for fold ' + str(i+1))
    plt.show()
In [70]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(svm6_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for linear SVM for fold ' + str(i+1))
    plt.show()
In [71]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(knn1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for 2 K-Nearest Neighbours for fold ' + str(i+1))
    plt.show()
In [72]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(knn2_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for 3 K-Nearest Neighbours for fold ' + str(i+1))
    plt.show()
In [73]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(rfc1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for 10 RFC for fold ' + str(i+1))
    plt.show()
In [74]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(rfc2_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for 15 RFC for fold ' + str(i+1))
    plt.show()
In [75]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(rfc3_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for 1000 RFC for fold ' + str(i+1))
    plt.show()
In [76]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(mlp1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for 1000 MLP for fold ' + str(i+1))
    plt.show()
In [77]:
for i in range(n_folds):    
    fpr, tpr, _ = roc_curve(mlp2_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for 2000 MLP for fold ' + str(i+1))
    plt.show()
In [78]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(logr1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for logistic regression (l1) for fold ' + str(i+1))
    plt.show()
In [79]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(logr2_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for logistic regression (l2) for fold ' + str(i+1))
    plt.show()
In [80]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(bnb1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for Bernoulli NB for fold ' + str(i+1))
    plt.show()
In [81]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(cnb1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for Complement NB for fold ' + str(i+1))
    plt.show()
In [82]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(gnb1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for Gaussian NB for fold ' + str(i+1))
    plt.show()
In [83]:
for i in range(n_folds):
    fpr, tpr, _ = roc_curve(mnb1_predy_train[i], df_y_n[i], drop_intermediate=False)
    plt.plot(fpr, tpr, color='red')
    plt.xlabel('fpr')
    plt.ylabel('tpr')
    plt.title('ROC curve for Multinomial NB for fold ' + str(i+1))
    plt.show()
In [84]:
# Testing Data Confusion Matrices:
In [85]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm1 = confusion_matrix(y, svm1_predy[i])
    print(cm1)
    cm1df = pd.DataFrame(cm1, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm1df, annot=True)
[[2983 1609]
 [2121 2790]]
[[3068 1651]
 [2065 2720]]
[[2997 1650]
 [2131 2726]]
[[2997 1666]
 [2045 2796]]
[[3017 1586]
 [2122 2779]]
In [86]:
# for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
#     fig = plt.figure(i+1)
#     cm2 = confusion_matrix(y, svm2_predy[i])
#     print(cm2)
#     cm2df = pd.DataFrame(cm2, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
#     seaborn.heatmap(cm2df, annot=True)
In [87]:
# for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
#     fig = plt.figure(i+1)
#     cm3 = confusion_matrix(y, svm3_predy[i])
#     print(cm3)
#     cm3df = pd.DataFrame(cm3, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
#     seaborn.heatmap(cm3df, annot=True)
In [88]:
# for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
#     fig = plt.figure(i+1)
#     cm4 = confu3sion_matrix(y, svm4_predy[i])
#     print(cm4)
#     cm4df = pd.DataFrame(cm4, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
#     seaborn.heatmap(cm4df, annot=True)
In [89]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm5 = confusion_matrix(y, svm5_predy[i])
    print(cm5)
    cm5df = pd.DataFrame(cm5, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm5df, annot=True)
[[2410 2182]
 [1729 3182]]
[[2434 2285]
 [1665 3120]]
[[2424 2223]
 [1746 3111]]
[[2432 2231]
 [1691 3150]]
[[2368 2235]
 [1686 3215]]
In [90]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm6 = confusion_matrix(y, svm6_predy[i])
    print(cm6)
    cm6df = pd.DataFrame(cm6, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm6df, annot=True)
[[2899 1693]
 [2045 2866]]
[[2988 1731]
 [2010 2775]]
[[2958 1689]
 [2055 2802]]
[[2891 1772]
 [1958 2883]]
[[2905 1698]
 [2051 2850]]
In [91]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm7 = confusion_matrix(y, rfc1_predy[i])
    print(cm7)
    cm7df = pd.DataFrame(cm7, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm7df, annot=True)
[[4556   36]
 [ 122 4789]]
[[4687   32]
 [ 124 4661]]
[[4607   40]
 [ 134 4723]]
[[4637   26]
 [ 139 4702]]
[[3199 1404]
 [2138 2763]]
In [92]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm8 = confusion_matrix(y, rfc2_predy[i])
    print(cm8)
    cm8df = pd.DataFrame(cm8, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm8df, annot=True)
[[4561   31]
 [  29 4882]]
[[4685   34]
 [  39 4746]]
[[4624   23]
 [  31 4826]]
[[4633   30]
 [  28 4813]]
[[2984 1619]
 [1784 3117]]
In [93]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm9 = confusion_matrix(y, rfc3_predy[i])
    print(cm9)
    cm9df = pd.DataFrame(cm9, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm9df, annot=True)
[[4592    0]
 [   1 4910]]
[[4718    1]
 [   1 4784]]
[[4647    0]
 [   0 4857]]
[[4662    1]
 [   0 4841]]
[[3009 1594]
 [1742 3159]]
In [94]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm10 = confusion_matrix(y, knn1_predy[i])
    print(cm10)
    cm10df = pd.DataFrame(cm10, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm10df, annot=True)
[[4592    0]
 [1978 2933]]
[[4719    0]
 [1948 2837]]
[[4647    0]
 [1988 2869]]
[[4663    0]
 [1976 2865]]
[[3576 1027]
 [3147 1754]]
In [95]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm11 = confusion_matrix(y, knn2_predy[i])
    print(cm11)
    cm11df = pd.DataFrame(cm11, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm11df, annot=True)
[[3566 1026]
 [ 933 3978]]
[[3666 1053]
 [ 946 3839]]
[[3613 1034]
 [ 970 3887]]
[[3690  973]
 [1012 3829]]
[[2611 1992]
 [1984 2917]]
In [96]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm12 = confusion_matrix(y, mlp1_predy[i])
    print(cm12)
    cm12df = pd.DataFrame(cm12, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm12df, annot=True)
[[2103 2489]
 [1599 3312]]
[[2154 2565]
 [1664 3121]]
[[2130 2517]
 [1686 3171]]
[[2108 2555]
 [1659 3182]]
[[2081 2522]
 [1674 3227]]
In [97]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm13 = confusion_matrix(y, mlp2_predy[i])
    print(cm13)
    cm13df = pd.DataFrame(cm13, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm13df, annot=True)
[[4050  542]
 [3766 1145]]
[[4216  503]
 [3651 1134]]
[[4131  516]
 [3736 1121]]
[[4113  550]
 [3720 1121]]
[[4032  571]
 [3761 1140]]
In [98]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm14 = confusion_matrix(y, logr1_predy[i])
    print(cm14)
    cm14df = pd.DataFrame(cm14, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm14df, annot=True)
[[2794 1798]
 [1916 2995]]
[[2880 1839]
 [1891 2894]]
[[2814 1833]
 [1935 2922]]
[[2789 1874]
 [1865 2976]]
[[2810 1793]
 [1901 3000]]
In [99]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm15 = confusion_matrix(y, logr2_predy[i])
    print(cm15)
    cm15df = pd.DataFrame(cm15, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm15df, annot=True)
[[2577 2015]
 [2038 2873]]
[[2666 2053]
 [1922 2863]]
[[2625 2022]
 [1998 2859]]
[[2610 2053]
 [1952 2889]]
[[2565 2038]
 [1960 2941]]
In [100]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm16 = confusion_matrix(y, bnb1_predy[i])
    print(cm16)
    cm16df = pd.DataFrame(cm16, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm16df, annot=True)
[[ 914 3678]
 [ 787 4124]]
[[ 991 3728]
 [ 723 4062]]
[[ 957 3690]
 [ 832 4025]]
[[ 906 3757]
 [ 766 4075]]
[[ 921 3682]
 [ 735 4166]]
In [101]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm17 = confusion_matrix(y, cnb1_predy[i])
    print(cm17)
    cm17df = pd.DataFrame(cm17, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm17df, annot=True)
[[2827 1765]
 [2380 2531]]
[[2927 1792]
 [2319 2466]]
[[2875 1772]
 [2344 2513]]
[[2838 1825]
 [2324 2517]]
[[2813 1790]
 [2308 2593]]
In [102]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm18 = confusion_matrix(y, gnb1_predy[i])
    print(cm18)
    cm18df = pd.DataFrame(cm18, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm18df, annot=True)
[[ 882 3710]
 [ 678 4233]]
[[ 937 3782]
 [ 627 4158]]
[[ 885 3762]
 [ 704 4153]]
[[ 888 3775]
 [ 668 4173]]
[[ 904 3699]
 [ 633 4268]]
In [103]:
for i,y in zip(range(n_folds), [df_ya, df_yb, df_yc, df_yd, df_ye]):
    fig = plt.figure(i+1)
    cm19 = confusion_matrix(y, mnb1_predy[i])
    print(cm19)
    cm19df = pd.DataFrame(cm19, index = ["admitted",'not admitted'], columns = ["admitted",'not admitted'])
    seaborn.heatmap(cm19df, annot=True)
[[2824 1768]
 [2379 2532]]
[[2926 1793]
 [2318 2467]]
[[2871 1776]
 [2344 2513]]
[[2837 1826]
 [2323 2518]]
[[2812 1791]
 [2304 2597]]
In [104]:
#TensorFlow MLP
import tensorflow as tf

n_nodes_hl1 = 30
n_nodes_hl2 = 45
n_nodes_hl3 = 16

n_classes = 2
batch_size = 100

x = tf.placeholder('float', [None, 15])
y = tf.placeholder('float',[None,2])

def neuralnetwork(data):
    hiddenlayer1 = {'weights':tf.Variable(tf.random_normal([15, n_nodes_hl1])), 'biases':tf.Variable(tf.random_normal([n_nodes_hl1]))}
    hiddenlayer2 = {'weights':tf.Variable(tf.random_normal([n_nodes_hl1, n_nodes_hl2])), 'biases':tf.Variable(tf.random_normal([n_nodes_hl2]))}
    # hiddenlayer3 = {'weights':tf.Variable(tf.random_normal([n_nodes_hl2, n_nodes_hl3])), 'biases':tf.Variable(tf.random_normal([n_nodes_hl3]))}

    outputlayer = {'weights':tf.Variable(tf.random_normal([n_nodes_hl2, n_classes])),
                    'biases':tf.Variable(tf.random_normal([n_classes])),}

    l1 = tf.add(tf.matmul(data,hiddenlayer1['weights']), hiddenlayer1['biases'])
    l1 = tf.nn.tanh(l1)
    l2 = tf.add(tf.matmul(l1,hiddenlayer2['weights']), hiddenlayer2['biases'])
    l2 = tf.nn.tanh(l2)
    
    # l3 = tf.add(tf.matmul(l2,hiddenlayer3['weights']), hiddenlayer3['biases'])
    # l3 = tf.nn.tanh(l3)
    output = tf.matmul(l2,outputlayer['weights']) + outputlayer['biases']
    output=tf.nn.softmax(output)
    return output
In [105]:
cwd = os.getcwd()
print(cwd)
C:\Users\tanis_vt1gg0x\IDE Workspaces\Jupyter Notebooks\ML Project
In [106]:
from sklearn.preprocessing import OneHotEncoder
enc = OneHotEncoder(handle_unknown='ignore')
df_yaten = enc.fit_transform(df_ya.reshape(-1,1)).toarray()
df_ybcdeten = enc.fit_transform(df_ybcde.reshape(-1,1)).toarray()
df_ybten = enc.fit_transform(df_yb.reshape(-1,1)).toarray()
df_yacdeten = enc.fit_transform(df_yacde.reshape(-1,1)).toarray()
df_ycten = enc.fit_transform(df_yc.reshape(-1,1)).toarray()
df_yabdeten = enc.fit_transform(df_yabde.reshape(-1,1)).toarray()
df_ydten = enc.fit_transform(df_yd.reshape(-1,1)).toarray()
df_yabceten = enc.fit_transform(df_yabce.reshape(-1,1)).toarray()
df_yeten = enc.fit_transform(df_ye.reshape(-1,1)).toarray()
df_yabcdten = enc.fit_transform(df_yabcd.reshape(-1,1)).toarray()
In [107]:
def train_neural_network(x):
    prediction = neuralnetwork(x)
    eploss=[]
    eplosstrain=[]
    cost = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y))

    optimizer = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(cost)
    # print(optimizer)
    
    hm_epochs = 1000
    with tf.Session() as sess:
        # OLD:
        # sess.run(tf.initialize_all_variables())
        # NEW:
        sess.run(tf.global_variables_initializer())

        for epoch in range(hm_epochs):
            epoch_loss = 0
            epoch_losstrain=0
            # for i in range(40613):
            #     epoch_x, epoch_y =dfbcde[i].reshape(1,-1),df_ybcde[i].reshape(1,-1) 
            #     # print(type((epoch_x)))
            #     lam, c = sess.run([optimizer, cost], feed_dict={x: (epoch_x), y: (epoch_y)})
            #     epoch_loss += c
            #     print(lam,c)
            # for index in range(0,38868,10000):
            #   epoch_x, epoch_y =dfbcde[index:index+10000,:],df_ybcdeten[index:index+10000,:]
            #   lam, c = sess.run([optimizer, cost], feed_dict={x: (epoch_x), y: (epoch_y)})
            #   # lam, c1 = sess.run([optimizer, cost], feed_dict={x: (dfa), y: (df_yaten)})
            epoch_x, epoch_y =dfbcde,df_ybcdeten
            lam, c = sess.run([optimizer, cost], feed_dict={x: (epoch_x), y: (epoch_y)})
            epoch_loss += c
            eploss.append(epoch_loss)

            print('Epoch', epoch, 'completed out of',hm_epochs,'loss:',epoch_loss)
        print(prediction)
        predy=prediction.eval({x:dfa,y:df_yaten})

        correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
        accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
        print("accuracy on train:",accuracy.eval({x:dfbcde,y:df_ybcdeten}))
        print('Accuracy on test :',accuracy.eval({x:dfa, y:df_yaten}))
    return eploss,predy
In [108]:
losslist, predy = train_neural_network(x)
WARNING:tensorflow:From <ipython-input-107-72555f97bc27>:5: softmax_cross_entropy_with_logits (from tensorflow.python.ops.nn_ops) is deprecated and will be removed in a future version.
Instructions for updating:

Future major versions of TensorFlow will allow gradients to flow
into the labels input on backprop by default.

See `tf.nn.softmax_cross_entropy_with_logits_v2`.

Epoch 0 completed out of 1000 loss: 0.789579451084137
Epoch 1 completed out of 1000 loss: 0.7893659472465515
Epoch 2 completed out of 1000 loss: 0.7891513705253601
Epoch 3 completed out of 1000 loss: 0.7889324426651001
Epoch 4 completed out of 1000 loss: 0.7887110114097595
Epoch 5 completed out of 1000 loss: 0.788486659526825
Epoch 6 completed out of 1000 loss: 0.7882596254348755
Epoch 7 completed out of 1000 loss: 0.7880343794822693
Epoch 8 completed out of 1000 loss: 0.7878027558326721
Epoch 9 completed out of 1000 loss: 0.7875715494155884
Epoch 10 completed out of 1000 loss: 0.7873350381851196
Epoch 11 completed out of 1000 loss: 0.7870920300483704
Epoch 12 completed out of 1000 loss: 0.7868433594703674
Epoch 13 completed out of 1000 loss: 0.7865957021713257
Epoch 14 completed out of 1000 loss: 0.7863467335700989
Epoch 15 completed out of 1000 loss: 0.7860976457595825
Epoch 16 completed out of 1000 loss: 0.7858461141586304
Epoch 17 completed out of 1000 loss: 0.7855923175811768
Epoch 18 completed out of 1000 loss: 0.7853323221206665
Epoch 19 completed out of 1000 loss: 0.7850659489631653
Epoch 20 completed out of 1000 loss: 0.7847972512245178
Epoch 21 completed out of 1000 loss: 0.7845233082771301
Epoch 22 completed out of 1000 loss: 0.78424471616745
Epoch 23 completed out of 1000 loss: 0.783962070941925
Epoch 24 completed out of 1000 loss: 0.7836786508560181
Epoch 25 completed out of 1000 loss: 0.7833912372589111
Epoch 26 completed out of 1000 loss: 0.7831016778945923
Epoch 27 completed out of 1000 loss: 0.7828037738800049
Epoch 28 completed out of 1000 loss: 0.7825073599815369
Epoch 29 completed out of 1000 loss: 0.7822021842002869
Epoch 30 completed out of 1000 loss: 0.7818928956985474
Epoch 31 completed out of 1000 loss: 0.7815758585929871
Epoch 32 completed out of 1000 loss: 0.7812535762786865
Epoch 33 completed out of 1000 loss: 0.7809247970581055
Epoch 34 completed out of 1000 loss: 0.7805858850479126
Epoch 35 completed out of 1000 loss: 0.7802433371543884
Epoch 36 completed out of 1000 loss: 0.7798951268196106
Epoch 37 completed out of 1000 loss: 0.7795438766479492
Epoch 38 completed out of 1000 loss: 0.7791880369186401
Epoch 39 completed out of 1000 loss: 0.7788295149803162
Epoch 40 completed out of 1000 loss: 0.778462827205658
Epoch 41 completed out of 1000 loss: 0.7780914902687073
Epoch 42 completed out of 1000 loss: 0.7777054309844971
Epoch 43 completed out of 1000 loss: 0.7773157954216003
Epoch 44 completed out of 1000 loss: 0.7769222259521484
Epoch 45 completed out of 1000 loss: 0.7765297293663025
Epoch 46 completed out of 1000 loss: 0.7761275768280029
Epoch 47 completed out of 1000 loss: 0.7757206559181213
Epoch 48 completed out of 1000 loss: 0.7753062844276428
Epoch 49 completed out of 1000 loss: 0.7748857140541077
Epoch 50 completed out of 1000 loss: 0.7744572758674622
Epoch 51 completed out of 1000 loss: 0.7740201354026794
Epoch 52 completed out of 1000 loss: 0.7735753059387207
Epoch 53 completed out of 1000 loss: 0.7731207013130188
Epoch 54 completed out of 1000 loss: 0.7726662158966064
Epoch 55 completed out of 1000 loss: 0.7722041606903076
Epoch 56 completed out of 1000 loss: 0.7717385292053223
Epoch 57 completed out of 1000 loss: 0.7712657451629639
Epoch 58 completed out of 1000 loss: 0.7707857489585876
Epoch 59 completed out of 1000 loss: 0.7703009247779846
Epoch 60 completed out of 1000 loss: 0.769810140132904
Epoch 61 completed out of 1000 loss: 0.7693129181861877
Epoch 62 completed out of 1000 loss: 0.7688106298446655
Epoch 63 completed out of 1000 loss: 0.768304705619812
Epoch 64 completed out of 1000 loss: 0.7677921652793884
Epoch 65 completed out of 1000 loss: 0.7672721743583679
Epoch 66 completed out of 1000 loss: 0.7667438983917236
Epoch 67 completed out of 1000 loss: 0.7662073373794556
Epoch 68 completed out of 1000 loss: 0.7656647562980652
Epoch 69 completed out of 1000 loss: 0.7651235461235046
Epoch 70 completed out of 1000 loss: 0.7645756006240845
Epoch 71 completed out of 1000 loss: 0.764028012752533
Epoch 72 completed out of 1000 loss: 0.7634750604629517
Epoch 73 completed out of 1000 loss: 0.7629143595695496
Epoch 74 completed out of 1000 loss: 0.762354850769043
Epoch 75 completed out of 1000 loss: 0.7617899775505066
Epoch 76 completed out of 1000 loss: 0.7612251043319702
Epoch 77 completed out of 1000 loss: 0.7606570720672607
Epoch 78 completed out of 1000 loss: 0.7600825428962708
Epoch 79 completed out of 1000 loss: 0.7595067620277405
Epoch 80 completed out of 1000 loss: 0.7589336633682251
Epoch 81 completed out of 1000 loss: 0.7583549618721008
Epoch 82 completed out of 1000 loss: 0.7577797174453735
Epoch 83 completed out of 1000 loss: 0.7572038173675537
Epoch 84 completed out of 1000 loss: 0.7566272020339966
Epoch 85 completed out of 1000 loss: 0.7560544013977051
Epoch 86 completed out of 1000 loss: 0.7554798126220703
Epoch 87 completed out of 1000 loss: 0.7549064755439758
Epoch 88 completed out of 1000 loss: 0.754339873790741
Epoch 89 completed out of 1000 loss: 0.7537733912467957
Epoch 90 completed out of 1000 loss: 0.7532109618186951
Epoch 91 completed out of 1000 loss: 0.7526510953903198
Epoch 92 completed out of 1000 loss: 0.7520962357521057
Epoch 93 completed out of 1000 loss: 0.7515447735786438
Epoch 94 completed out of 1000 loss: 0.7510011196136475
Epoch 95 completed out of 1000 loss: 0.7504652738571167
Epoch 96 completed out of 1000 loss: 0.749934196472168
Epoch 97 completed out of 1000 loss: 0.7494073510169983
Epoch 98 completed out of 1000 loss: 0.7488862872123718
Epoch 99 completed out of 1000 loss: 0.748374342918396
Epoch 100 completed out of 1000 loss: 0.7478736042976379
Epoch 101 completed out of 1000 loss: 0.7473822832107544
Epoch 102 completed out of 1000 loss: 0.746900200843811
Epoch 103 completed out of 1000 loss: 0.7464286088943481
Epoch 104 completed out of 1000 loss: 0.7459647059440613
Epoch 105 completed out of 1000 loss: 0.745512068271637
Epoch 106 completed out of 1000 loss: 0.7450705170631409
Epoch 107 completed out of 1000 loss: 0.7446376085281372
Epoch 108 completed out of 1000 loss: 0.7442188262939453
Epoch 109 completed out of 1000 loss: 0.7438134551048279
Epoch 110 completed out of 1000 loss: 0.743417501449585
Epoch 111 completed out of 1000 loss: 0.7430300712585449
Epoch 112 completed out of 1000 loss: 0.7426575422286987
Epoch 113 completed out of 1000 loss: 0.7422861456871033
Epoch 114 completed out of 1000 loss: 0.74192875623703
Epoch 115 completed out of 1000 loss: 0.7415752410888672
Epoch 116 completed out of 1000 loss: 0.7412265539169312
Epoch 117 completed out of 1000 loss: 0.7408857345581055
Epoch 118 completed out of 1000 loss: 0.7405568957328796
Epoch 119 completed out of 1000 loss: 0.7402346730232239
Epoch 120 completed out of 1000 loss: 0.7399137616157532
Epoch 121 completed out of 1000 loss: 0.7396008968353271
Epoch 122 completed out of 1000 loss: 0.739296555519104
Epoch 123 completed out of 1000 loss: 0.7390028238296509
Epoch 124 completed out of 1000 loss: 0.7387223243713379
Epoch 125 completed out of 1000 loss: 0.7384452819824219
Epoch 126 completed out of 1000 loss: 0.7381781935691833
Epoch 127 completed out of 1000 loss: 0.7379130125045776
Epoch 128 completed out of 1000 loss: 0.7376468181610107
Epoch 129 completed out of 1000 loss: 0.7373868823051453
Epoch 130 completed out of 1000 loss: 0.7371317148208618
Epoch 131 completed out of 1000 loss: 0.7368781566619873
Epoch 132 completed out of 1000 loss: 0.7366353273391724
Epoch 133 completed out of 1000 loss: 0.7363938689231873
Epoch 134 completed out of 1000 loss: 0.7361587882041931
Epoch 135 completed out of 1000 loss: 0.7359297275543213
Epoch 136 completed out of 1000 loss: 0.7357114553451538
Epoch 137 completed out of 1000 loss: 0.7355030179023743
Epoch 138 completed out of 1000 loss: 0.735298752784729
Epoch 139 completed out of 1000 loss: 0.7350938320159912
Epoch 140 completed out of 1000 loss: 0.7348888516426086
Epoch 141 completed out of 1000 loss: 0.7346855401992798
Epoch 142 completed out of 1000 loss: 0.7344781160354614
Epoch 143 completed out of 1000 loss: 0.7342737913131714
Epoch 144 completed out of 1000 loss: 0.7340659499168396
Epoch 145 completed out of 1000 loss: 0.733855664730072
Epoch 146 completed out of 1000 loss: 0.7336438298225403
Epoch 147 completed out of 1000 loss: 0.733432412147522
Epoch 148 completed out of 1000 loss: 0.7332276701927185
Epoch 149 completed out of 1000 loss: 0.7330257892608643
Epoch 150 completed out of 1000 loss: 0.732832670211792
Epoch 151 completed out of 1000 loss: 0.7326434850692749
Epoch 152 completed out of 1000 loss: 0.7324452996253967
Epoch 153 completed out of 1000 loss: 0.7322471737861633
Epoch 154 completed out of 1000 loss: 0.732059895992279
Epoch 155 completed out of 1000 loss: 0.731873095035553
Epoch 156 completed out of 1000 loss: 0.7316850423812866
Epoch 157 completed out of 1000 loss: 0.7315064072608948
Epoch 158 completed out of 1000 loss: 0.7313327193260193
Epoch 159 completed out of 1000 loss: 0.731160581111908
Epoch 160 completed out of 1000 loss: 0.7309862375259399
Epoch 161 completed out of 1000 loss: 0.7308199405670166
Epoch 162 completed out of 1000 loss: 0.7306566834449768
Epoch 163 completed out of 1000 loss: 0.7304965853691101
Epoch 164 completed out of 1000 loss: 0.730339765548706
Epoch 165 completed out of 1000 loss: 0.7301840782165527
Epoch 166 completed out of 1000 loss: 0.7300208210945129
Epoch 167 completed out of 1000 loss: 0.7298537492752075
Epoch 168 completed out of 1000 loss: 0.7296933531761169
Epoch 169 completed out of 1000 loss: 0.7295411229133606
Epoch 170 completed out of 1000 loss: 0.7293989658355713
Epoch 171 completed out of 1000 loss: 0.7292681932449341
Epoch 172 completed out of 1000 loss: 0.7291383147239685
Epoch 173 completed out of 1000 loss: 0.7290109992027283
Epoch 174 completed out of 1000 loss: 0.7288831472396851
Epoch 175 completed out of 1000 loss: 0.7287532687187195
Epoch 176 completed out of 1000 loss: 0.7286255955696106
Epoch 177 completed out of 1000 loss: 0.7285012602806091
Epoch 178 completed out of 1000 loss: 0.7283769845962524
Epoch 179 completed out of 1000 loss: 0.7282534837722778
Epoch 180 completed out of 1000 loss: 0.7281315326690674
Epoch 181 completed out of 1000 loss: 0.7280111312866211
Epoch 182 completed out of 1000 loss: 0.7278894782066345
Epoch 183 completed out of 1000 loss: 0.7277696132659912
Epoch 184 completed out of 1000 loss: 0.7276455760002136
Epoch 185 completed out of 1000 loss: 0.7275288701057434
Epoch 186 completed out of 1000 loss: 0.7274131178855896
Epoch 187 completed out of 1000 loss: 0.7273038625717163
Epoch 188 completed out of 1000 loss: 0.7271985411643982
Epoch 189 completed out of 1000 loss: 0.7270969152450562
Epoch 190 completed out of 1000 loss: 0.7269933223724365
Epoch 191 completed out of 1000 loss: 0.7268905639648438
Epoch 192 completed out of 1000 loss: 0.7267875075340271
Epoch 193 completed out of 1000 loss: 0.7266814708709717
Epoch 194 completed out of 1000 loss: 0.7265778183937073
Epoch 195 completed out of 1000 loss: 0.7264764904975891
Epoch 196 completed out of 1000 loss: 0.7263782620429993
Epoch 197 completed out of 1000 loss: 0.7262824177742004
Epoch 198 completed out of 1000 loss: 0.7261860966682434
Epoch 199 completed out of 1000 loss: 0.7260928153991699
Epoch 200 completed out of 1000 loss: 0.7259960174560547
Epoch 201 completed out of 1000 loss: 0.7259025573730469
Epoch 202 completed out of 1000 loss: 0.7258045673370361
Epoch 203 completed out of 1000 loss: 0.7257117033004761
Epoch 204 completed out of 1000 loss: 0.7256172299385071
Epoch 205 completed out of 1000 loss: 0.7255232930183411
Epoch 206 completed out of 1000 loss: 0.7254307866096497
Epoch 207 completed out of 1000 loss: 0.7253400683403015
Epoch 208 completed out of 1000 loss: 0.7252501249313354
Epoch 209 completed out of 1000 loss: 0.7251613736152649
Epoch 210 completed out of 1000 loss: 0.7250728607177734
Epoch 211 completed out of 1000 loss: 0.7249869108200073
Epoch 212 completed out of 1000 loss: 0.7249016761779785
Epoch 213 completed out of 1000 loss: 0.7248201966285706
Epoch 214 completed out of 1000 loss: 0.7247394323348999
Epoch 215 completed out of 1000 loss: 0.7246575951576233
Epoch 216 completed out of 1000 loss: 0.7245778441429138
Epoch 217 completed out of 1000 loss: 0.7244959473609924
Epoch 218 completed out of 1000 loss: 0.7244133353233337
Epoch 219 completed out of 1000 loss: 0.7243310213088989
Epoch 220 completed out of 1000 loss: 0.7242536544799805
Epoch 221 completed out of 1000 loss: 0.7241774201393127
Epoch 222 completed out of 1000 loss: 0.7241030335426331
Epoch 223 completed out of 1000 loss: 0.7240284085273743
Epoch 224 completed out of 1000 loss: 0.7239560484886169
Epoch 225 completed out of 1000 loss: 0.7238841652870178
Epoch 226 completed out of 1000 loss: 0.723811686038971
Epoch 227 completed out of 1000 loss: 0.723734974861145
Epoch 228 completed out of 1000 loss: 0.7236617207527161
Epoch 229 completed out of 1000 loss: 0.7235920429229736
Epoch 230 completed out of 1000 loss: 0.7235243320465088
Epoch 231 completed out of 1000 loss: 0.7234580516815186
Epoch 232 completed out of 1000 loss: 0.7233933806419373
Epoch 233 completed out of 1000 loss: 0.7233301997184753
Epoch 234 completed out of 1000 loss: 0.7232656478881836
Epoch 235 completed out of 1000 loss: 0.7232014536857605
Epoch 236 completed out of 1000 loss: 0.7231402397155762
Epoch 237 completed out of 1000 loss: 0.7230818271636963
Epoch 238 completed out of 1000 loss: 0.7230208516120911
Epoch 239 completed out of 1000 loss: 0.7229616045951843
Epoch 240 completed out of 1000 loss: 0.7229036688804626
Epoch 241 completed out of 1000 loss: 0.7228423357009888
Epoch 242 completed out of 1000 loss: 0.7227830290794373
Epoch 243 completed out of 1000 loss: 0.7227250933647156
Epoch 244 completed out of 1000 loss: 0.7226672172546387
Epoch 245 completed out of 1000 loss: 0.7226119637489319
Epoch 246 completed out of 1000 loss: 0.7225545048713684
Epoch 247 completed out of 1000 loss: 0.7224975228309631
Epoch 248 completed out of 1000 loss: 0.7224393486976624
Epoch 249 completed out of 1000 loss: 0.7223847508430481
Epoch 250 completed out of 1000 loss: 0.7223291397094727
Epoch 251 completed out of 1000 loss: 0.7222747802734375
Epoch 252 completed out of 1000 loss: 0.7222214937210083
Epoch 253 completed out of 1000 loss: 0.7221657633781433
Epoch 254 completed out of 1000 loss: 0.7221124768257141
Epoch 255 completed out of 1000 loss: 0.7220606803894043
Epoch 256 completed out of 1000 loss: 0.7220073342323303
Epoch 257 completed out of 1000 loss: 0.7219546437263489
Epoch 258 completed out of 1000 loss: 0.721902072429657
Epoch 259 completed out of 1000 loss: 0.7218503952026367
Epoch 260 completed out of 1000 loss: 0.7217970490455627
Epoch 261 completed out of 1000 loss: 0.7217444777488708
Epoch 262 completed out of 1000 loss: 0.7216930389404297
Epoch 263 completed out of 1000 loss: 0.7216376662254333
Epoch 264 completed out of 1000 loss: 0.7215822339057922
Epoch 265 completed out of 1000 loss: 0.7215293049812317
Epoch 266 completed out of 1000 loss: 0.7214744687080383
Epoch 267 completed out of 1000 loss: 0.7214208841323853
Epoch 268 completed out of 1000 loss: 0.7213643193244934
Epoch 269 completed out of 1000 loss: 0.7213094830513
Epoch 270 completed out of 1000 loss: 0.7212552428245544
Epoch 271 completed out of 1000 loss: 0.7211963534355164
Epoch 272 completed out of 1000 loss: 0.721138596534729
Epoch 273 completed out of 1000 loss: 0.7210810780525208
Epoch 274 completed out of 1000 loss: 0.7210204005241394
Epoch 275 completed out of 1000 loss: 0.7209606170654297
Epoch 276 completed out of 1000 loss: 0.7209041118621826
Epoch 277 completed out of 1000 loss: 0.7208457589149475
Epoch 278 completed out of 1000 loss: 0.7207900285720825
Epoch 279 completed out of 1000 loss: 0.7207268476486206
Epoch 280 completed out of 1000 loss: 0.7206624150276184
Epoch 281 completed out of 1000 loss: 0.7205943465232849
Epoch 282 completed out of 1000 loss: 0.7205306887626648
Epoch 283 completed out of 1000 loss: 0.7204684615135193
Epoch 284 completed out of 1000 loss: 0.7204038500785828
Epoch 285 completed out of 1000 loss: 0.7203386425971985
Epoch 286 completed out of 1000 loss: 0.7202771902084351
Epoch 287 completed out of 1000 loss: 0.7202138304710388
Epoch 288 completed out of 1000 loss: 0.7201524972915649
Epoch 289 completed out of 1000 loss: 0.7200825810432434
Epoch 290 completed out of 1000 loss: 0.7200040221214294
Epoch 291 completed out of 1000 loss: 0.719927191734314
Epoch 292 completed out of 1000 loss: 0.7198439240455627
Epoch 293 completed out of 1000 loss: 0.7197763919830322
Epoch 294 completed out of 1000 loss: 0.719706118106842
Epoch 295 completed out of 1000 loss: 0.7196333408355713
Epoch 296 completed out of 1000 loss: 0.71953946352005
Epoch 297 completed out of 1000 loss: 0.7194422483444214
Epoch 298 completed out of 1000 loss: 0.7193571925163269
Epoch 299 completed out of 1000 loss: 0.7192692160606384
Epoch 300 completed out of 1000 loss: 0.7191860675811768
Epoch 301 completed out of 1000 loss: 0.7191314101219177
Epoch 302 completed out of 1000 loss: 0.719071090221405
Epoch 303 completed out of 1000 loss: 0.71900475025177
Epoch 304 completed out of 1000 loss: 0.7189466953277588
Epoch 305 completed out of 1000 loss: 0.7189035415649414
Epoch 306 completed out of 1000 loss: 0.7188603281974792
Epoch 307 completed out of 1000 loss: 0.7188130617141724
Epoch 308 completed out of 1000 loss: 0.7187686562538147
Epoch 309 completed out of 1000 loss: 0.7187233567237854
Epoch 310 completed out of 1000 loss: 0.7186737656593323
Epoch 311 completed out of 1000 loss: 0.7186231017112732
Epoch 312 completed out of 1000 loss: 0.7185731530189514
Epoch 313 completed out of 1000 loss: 0.7185232043266296
Epoch 314 completed out of 1000 loss: 0.7184725403785706
Epoch 315 completed out of 1000 loss: 0.7184244990348816
Epoch 316 completed out of 1000 loss: 0.7183765172958374
Epoch 317 completed out of 1000 loss: 0.718329131603241
Epoch 318 completed out of 1000 loss: 0.71827632188797
Epoch 319 completed out of 1000 loss: 0.7182226181030273
Epoch 320 completed out of 1000 loss: 0.7181658744812012
Epoch 321 completed out of 1000 loss: 0.7181040048599243
Epoch 322 completed out of 1000 loss: 0.7180363535881042
Epoch 323 completed out of 1000 loss: 0.7179673314094543
Epoch 324 completed out of 1000 loss: 0.717901885509491
Epoch 325 completed out of 1000 loss: 0.7178434133529663
Epoch 326 completed out of 1000 loss: 0.7177950143814087
Epoch 327 completed out of 1000 loss: 0.7177489995956421
Epoch 328 completed out of 1000 loss: 0.717700183391571
Epoch 329 completed out of 1000 loss: 0.7176485657691956
Epoch 330 completed out of 1000 loss: 0.7175939083099365
Epoch 331 completed out of 1000 loss: 0.717538058757782
Epoch 332 completed out of 1000 loss: 0.7174773812294006
Epoch 333 completed out of 1000 loss: 0.7174106240272522
Epoch 334 completed out of 1000 loss: 0.7173352241516113
Epoch 335 completed out of 1000 loss: 0.717262327671051
Epoch 336 completed out of 1000 loss: 0.7171975374221802
Epoch 337 completed out of 1000 loss: 0.7171491384506226
Epoch 338 completed out of 1000 loss: 0.717116117477417
Epoch 339 completed out of 1000 loss: 0.7170795202255249
Epoch 340 completed out of 1000 loss: 0.7170389294624329
Epoch 341 completed out of 1000 loss: 0.7169913649559021
Epoch 342 completed out of 1000 loss: 0.7169381380081177
Epoch 343 completed out of 1000 loss: 0.7168856263160706
Epoch 344 completed out of 1000 loss: 0.7168357372283936
Epoch 345 completed out of 1000 loss: 0.7167849540710449
Epoch 346 completed out of 1000 loss: 0.7167379856109619
Epoch 347 completed out of 1000 loss: 0.7166945338249207
Epoch 348 completed out of 1000 loss: 0.7166459560394287
Epoch 349 completed out of 1000 loss: 0.7166052460670471
Epoch 350 completed out of 1000 loss: 0.7165599465370178
Epoch 351 completed out of 1000 loss: 0.7165190577507019
Epoch 352 completed out of 1000 loss: 0.7164764404296875
Epoch 353 completed out of 1000 loss: 0.7164323329925537
Epoch 354 completed out of 1000 loss: 0.716389000415802
Epoch 355 completed out of 1000 loss: 0.7163486480712891
Epoch 356 completed out of 1000 loss: 0.7163095474243164
Epoch 357 completed out of 1000 loss: 0.7162694931030273
Epoch 358 completed out of 1000 loss: 0.7162303328514099
Epoch 359 completed out of 1000 loss: 0.7161948084831238
Epoch 360 completed out of 1000 loss: 0.7161547541618347
Epoch 361 completed out of 1000 loss: 0.716117799282074
Epoch 362 completed out of 1000 loss: 0.7160821557044983
Epoch 363 completed out of 1000 loss: 0.7160437107086182
Epoch 364 completed out of 1000 loss: 0.7160062193870544
Epoch 365 completed out of 1000 loss: 0.7159687876701355
Epoch 366 completed out of 1000 loss: 0.71593177318573
Epoch 367 completed out of 1000 loss: 0.7158927917480469
Epoch 368 completed out of 1000 loss: 0.7158543467521667
Epoch 369 completed out of 1000 loss: 0.7158145308494568
Epoch 370 completed out of 1000 loss: 0.7157762050628662
Epoch 371 completed out of 1000 loss: 0.715736448764801
Epoch 372 completed out of 1000 loss: 0.7156968712806702
Epoch 373 completed out of 1000 loss: 0.7156597375869751
Epoch 374 completed out of 1000 loss: 0.7156190872192383
Epoch 375 completed out of 1000 loss: 0.7155808806419373
Epoch 376 completed out of 1000 loss: 0.7155420780181885
Epoch 377 completed out of 1000 loss: 0.7155038118362427
Epoch 378 completed out of 1000 loss: 0.7154656052589417
Epoch 379 completed out of 1000 loss: 0.7154279351234436
Epoch 380 completed out of 1000 loss: 0.7153887748718262
Epoch 381 completed out of 1000 loss: 0.7153515219688416
Epoch 382 completed out of 1000 loss: 0.7153145670890808
Epoch 383 completed out of 1000 loss: 0.715277910232544
Epoch 384 completed out of 1000 loss: 0.7152398824691772
Epoch 385 completed out of 1000 loss: 0.7152021527290344
Epoch 386 completed out of 1000 loss: 0.7151608467102051
Epoch 387 completed out of 1000 loss: 0.7151190042495728
Epoch 388 completed out of 1000 loss: 0.7150743007659912
Epoch 389 completed out of 1000 loss: 0.7150339484214783
Epoch 390 completed out of 1000 loss: 0.714993417263031
Epoch 391 completed out of 1000 loss: 0.7149555683135986
Epoch 392 completed out of 1000 loss: 0.7149189710617065
Epoch 393 completed out of 1000 loss: 0.7148851752281189
Epoch 394 completed out of 1000 loss: 0.7148513793945312
Epoch 395 completed out of 1000 loss: 0.7148204445838928
Epoch 396 completed out of 1000 loss: 0.714784562587738
Epoch 397 completed out of 1000 loss: 0.7147531509399414
Epoch 398 completed out of 1000 loss: 0.7147202491760254
Epoch 399 completed out of 1000 loss: 0.7146857976913452
Epoch 400 completed out of 1000 loss: 0.7146551012992859
Epoch 401 completed out of 1000 loss: 0.714625895023346
Epoch 402 completed out of 1000 loss: 0.7145963907241821
Epoch 403 completed out of 1000 loss: 0.7145662903785706
Epoch 404 completed out of 1000 loss: 0.714540958404541
Epoch 405 completed out of 1000 loss: 0.7145146131515503
Epoch 406 completed out of 1000 loss: 0.7144885063171387
Epoch 407 completed out of 1000 loss: 0.7144640684127808
Epoch 408 completed out of 1000 loss: 0.7144378423690796
Epoch 409 completed out of 1000 loss: 0.7144144773483276
Epoch 410 completed out of 1000 loss: 0.7143911123275757
Epoch 411 completed out of 1000 loss: 0.7143680453300476
Epoch 412 completed out of 1000 loss: 0.7143430113792419
Epoch 413 completed out of 1000 loss: 0.7143204212188721
Epoch 414 completed out of 1000 loss: 0.7142956256866455
Epoch 415 completed out of 1000 loss: 0.7142741084098816
Epoch 416 completed out of 1000 loss: 0.7142505049705505
Epoch 417 completed out of 1000 loss: 0.7142274379730225
Epoch 418 completed out of 1000 loss: 0.7142025828361511
Epoch 419 completed out of 1000 loss: 0.7141780257225037
Epoch 420 completed out of 1000 loss: 0.7141551971435547
Epoch 421 completed out of 1000 loss: 0.7141327857971191
Epoch 422 completed out of 1000 loss: 0.7141105532646179
Epoch 423 completed out of 1000 loss: 0.7140876054763794
Epoch 424 completed out of 1000 loss: 0.7140637636184692
Epoch 425 completed out of 1000 loss: 0.7140424847602844
Epoch 426 completed out of 1000 loss: 0.714021623134613
Epoch 427 completed out of 1000 loss: 0.7139976620674133
Epoch 428 completed out of 1000 loss: 0.7139730453491211
Epoch 429 completed out of 1000 loss: 0.7139519453048706
Epoch 430 completed out of 1000 loss: 0.7139325737953186
Epoch 431 completed out of 1000 loss: 0.713911771774292
Epoch 432 completed out of 1000 loss: 0.7138875126838684
Epoch 433 completed out of 1000 loss: 0.7138669490814209
Epoch 434 completed out of 1000 loss: 0.7138465046882629
Epoch 435 completed out of 1000 loss: 0.7138269543647766
Epoch 436 completed out of 1000 loss: 0.7138065695762634
Epoch 437 completed out of 1000 loss: 0.713784396648407
Epoch 438 completed out of 1000 loss: 0.7137638926506042
Epoch 439 completed out of 1000 loss: 0.7137434482574463
Epoch 440 completed out of 1000 loss: 0.7137261629104614
Epoch 441 completed out of 1000 loss: 0.713706374168396
Epoch 442 completed out of 1000 loss: 0.7136843204498291
Epoch 443 completed out of 1000 loss: 0.7136635780334473
Epoch 444 completed out of 1000 loss: 0.7136461734771729
Epoch 445 completed out of 1000 loss: 0.713624894618988
Epoch 446 completed out of 1000 loss: 0.7136033773422241
Epoch 447 completed out of 1000 loss: 0.7135840058326721
Epoch 448 completed out of 1000 loss: 0.7135658264160156
Epoch 449 completed out of 1000 loss: 0.7135453820228577
Epoch 450 completed out of 1000 loss: 0.7135233879089355
Epoch 451 completed out of 1000 loss: 0.7135046720504761
Epoch 452 completed out of 1000 loss: 0.7134873270988464
Epoch 453 completed out of 1000 loss: 0.7134655117988586
Epoch 454 completed out of 1000 loss: 0.7134471535682678
Epoch 455 completed out of 1000 loss: 0.7134296894073486
Epoch 456 completed out of 1000 loss: 0.7134098410606384
Epoch 457 completed out of 1000 loss: 0.7133935689926147
Epoch 458 completed out of 1000 loss: 0.7133739590644836
Epoch 459 completed out of 1000 loss: 0.7133554816246033
Epoch 460 completed out of 1000 loss: 0.7133393287658691
Epoch 461 completed out of 1000 loss: 0.7133182883262634
Epoch 462 completed out of 1000 loss: 0.7133009433746338
Epoch 463 completed out of 1000 loss: 0.7132801413536072
Epoch 464 completed out of 1000 loss: 0.713263750076294
Epoch 465 completed out of 1000 loss: 0.713242769241333
Epoch 466 completed out of 1000 loss: 0.7132261395454407
Epoch 467 completed out of 1000 loss: 0.713204562664032
Epoch 468 completed out of 1000 loss: 0.7131863236427307
Epoch 469 completed out of 1000 loss: 0.7131643891334534
Epoch 470 completed out of 1000 loss: 0.7131456732749939
Epoch 471 completed out of 1000 loss: 0.7131276726722717
Epoch 472 completed out of 1000 loss: 0.7131088972091675
Epoch 473 completed out of 1000 loss: 0.7130902409553528
Epoch 474 completed out of 1000 loss: 0.7130720615386963
Epoch 475 completed out of 1000 loss: 0.7130560278892517
Epoch 476 completed out of 1000 loss: 0.713036298751831
Epoch 477 completed out of 1000 loss: 0.713020384311676
Epoch 478 completed out of 1000 loss: 0.7130011320114136
Epoch 479 completed out of 1000 loss: 0.7129854559898376
Epoch 480 completed out of 1000 loss: 0.7129673361778259
Epoch 481 completed out of 1000 loss: 0.7129526138305664
Epoch 482 completed out of 1000 loss: 0.712933361530304
Epoch 483 completed out of 1000 loss: 0.7129186987876892
Epoch 484 completed out of 1000 loss: 0.7129003405570984
Epoch 485 completed out of 1000 loss: 0.7128852009773254
Epoch 486 completed out of 1000 loss: 0.7128673791885376
Epoch 487 completed out of 1000 loss: 0.7128522992134094
Epoch 488 completed out of 1000 loss: 0.7128338813781738
Epoch 489 completed out of 1000 loss: 0.712819516658783
Epoch 490 completed out of 1000 loss: 0.7128015756607056
Epoch 491 completed out of 1000 loss: 0.712787389755249
Epoch 492 completed out of 1000 loss: 0.7127698063850403
Epoch 493 completed out of 1000 loss: 0.7127538323402405
Epoch 494 completed out of 1000 loss: 0.712738573551178
Epoch 495 completed out of 1000 loss: 0.7127220034599304
Epoch 496 completed out of 1000 loss: 0.7127082347869873
Epoch 497 completed out of 1000 loss: 0.712691068649292
Epoch 498 completed out of 1000 loss: 0.7126776576042175
Epoch 499 completed out of 1000 loss: 0.7126606702804565
Epoch 500 completed out of 1000 loss: 0.7126439809799194
Epoch 501 completed out of 1000 loss: 0.7126307487487793
Epoch 502 completed out of 1000 loss: 0.7126124501228333
Epoch 503 completed out of 1000 loss: 0.7125998139381409
Epoch 504 completed out of 1000 loss: 0.7125825881958008
Epoch 505 completed out of 1000 loss: 0.7125666737556458
Epoch 506 completed out of 1000 loss: 0.7125518321990967
Epoch 507 completed out of 1000 loss: 0.7125343680381775
Epoch 508 completed out of 1000 loss: 0.7125194072723389
Epoch 509 completed out of 1000 loss: 0.7125014662742615
Epoch 510 completed out of 1000 loss: 0.712482213973999
Epoch 511 completed out of 1000 loss: 0.7124622464179993
Epoch 512 completed out of 1000 loss: 0.7124425172805786
Epoch 513 completed out of 1000 loss: 0.7124274969100952
Epoch 514 completed out of 1000 loss: 0.7124096155166626
Epoch 515 completed out of 1000 loss: 0.7123969793319702
Epoch 516 completed out of 1000 loss: 0.7123785614967346
Epoch 517 completed out of 1000 loss: 0.7123651504516602
Epoch 518 completed out of 1000 loss: 0.712347149848938
Epoch 519 completed out of 1000 loss: 0.7123351097106934
Epoch 520 completed out of 1000 loss: 0.712319552898407
Epoch 521 completed out of 1000 loss: 0.7123069763183594
Epoch 522 completed out of 1000 loss: 0.7122893929481506
Epoch 523 completed out of 1000 loss: 0.7122756242752075
Epoch 524 completed out of 1000 loss: 0.712259829044342
Epoch 525 completed out of 1000 loss: 0.7122448086738586
Epoch 526 completed out of 1000 loss: 0.7122315764427185
Epoch 527 completed out of 1000 loss: 0.71221524477005
Epoch 528 completed out of 1000 loss: 0.7122002243995667
Epoch 529 completed out of 1000 loss: 0.7121883034706116
Epoch 530 completed out of 1000 loss: 0.7121729254722595
Epoch 531 completed out of 1000 loss: 0.7121567726135254
Epoch 532 completed out of 1000 loss: 0.7121452689170837
Epoch 533 completed out of 1000 loss: 0.7121319770812988
Epoch 534 completed out of 1000 loss: 0.7121178507804871
Epoch 535 completed out of 1000 loss: 0.7121021747589111
Epoch 536 completed out of 1000 loss: 0.7120913863182068
Epoch 537 completed out of 1000 loss: 0.7120771408081055
Epoch 538 completed out of 1000 loss: 0.7120653390884399
Epoch 539 completed out of 1000 loss: 0.7120509147644043
Epoch 540 completed out of 1000 loss: 0.7120389938354492
Epoch 541 completed out of 1000 loss: 0.7120234370231628
Epoch 542 completed out of 1000 loss: 0.712011456489563
Epoch 543 completed out of 1000 loss: 0.7119982242584229
Epoch 544 completed out of 1000 loss: 0.7119853496551514
Epoch 545 completed out of 1000 loss: 0.7119715809822083
Epoch 546 completed out of 1000 loss: 0.7119596600532532
Epoch 547 completed out of 1000 loss: 0.7119470238685608
Epoch 548 completed out of 1000 loss: 0.7119343280792236
Epoch 549 completed out of 1000 loss: 0.711921215057373
Epoch 550 completed out of 1000 loss: 0.7119096517562866
Epoch 551 completed out of 1000 loss: 0.7118958830833435
Epoch 552 completed out of 1000 loss: 0.7118838429450989
Epoch 553 completed out of 1000 loss: 0.7118722200393677
Epoch 554 completed out of 1000 loss: 0.7118591666221619
Epoch 555 completed out of 1000 loss: 0.7118476629257202
Epoch 556 completed out of 1000 loss: 0.7118338346481323
Epoch 557 completed out of 1000 loss: 0.711822509765625
Epoch 558 completed out of 1000 loss: 0.7118108868598938
Epoch 559 completed out of 1000 loss: 0.711797833442688
Epoch 560 completed out of 1000 loss: 0.7117870450019836
Epoch 561 completed out of 1000 loss: 0.7117735147476196
Epoch 562 completed out of 1000 loss: 0.7117631435394287
Epoch 563 completed out of 1000 loss: 0.7117509245872498
Epoch 564 completed out of 1000 loss: 0.7117393016815186
Epoch 565 completed out of 1000 loss: 0.7117286920547485
Epoch 566 completed out of 1000 loss: 0.7117161154747009
Epoch 567 completed out of 1000 loss: 0.711703896522522
Epoch 568 completed out of 1000 loss: 0.7116929888725281
Epoch 569 completed out of 1000 loss: 0.7116810083389282
Epoch 570 completed out of 1000 loss: 0.7116690278053284
Epoch 571 completed out of 1000 loss: 0.7116596698760986
Epoch 572 completed out of 1000 loss: 0.7116476893424988
Epoch 573 completed out of 1000 loss: 0.7116366028785706
Epoch 574 completed out of 1000 loss: 0.7116245627403259
Epoch 575 completed out of 1000 loss: 0.7116146683692932
Epoch 576 completed out of 1000 loss: 0.7116033434867859
Epoch 577 completed out of 1000 loss: 0.7115904092788696
Epoch 578 completed out of 1000 loss: 0.7115803360939026
Epoch 579 completed out of 1000 loss: 0.7115687727928162
Epoch 580 completed out of 1000 loss: 0.7115576267242432
Epoch 581 completed out of 1000 loss: 0.711546003818512
Epoch 582 completed out of 1000 loss: 0.7115346789360046
Epoch 583 completed out of 1000 loss: 0.7115247249603271
Epoch 584 completed out of 1000 loss: 0.711512565612793
Epoch 585 completed out of 1000 loss: 0.7115012407302856
Epoch 586 completed out of 1000 loss: 0.7114890813827515
Epoch 587 completed out of 1000 loss: 0.7114763855934143
Epoch 588 completed out of 1000 loss: 0.7114666700363159
Epoch 589 completed out of 1000 loss: 0.7114552855491638
Epoch 590 completed out of 1000 loss: 0.7114445567131042
Epoch 591 completed out of 1000 loss: 0.7114338874816895
Epoch 592 completed out of 1000 loss: 0.7114222645759583
Epoch 593 completed out of 1000 loss: 0.71141117811203
Epoch 594 completed out of 1000 loss: 0.7114003896713257
Epoch 595 completed out of 1000 loss: 0.7113885879516602
Epoch 596 completed out of 1000 loss: 0.7113778591156006
Epoch 597 completed out of 1000 loss: 0.7113683223724365
Epoch 598 completed out of 1000 loss: 0.7113568186759949
Epoch 599 completed out of 1000 loss: 0.7113465070724487
Epoch 600 completed out of 1000 loss: 0.7113377451896667
Epoch 601 completed out of 1000 loss: 0.7113261222839355
Epoch 602 completed out of 1000 loss: 0.7113142609596252
Epoch 603 completed out of 1000 loss: 0.7113031148910522
Epoch 604 completed out of 1000 loss: 0.7112911343574524
Epoch 605 completed out of 1000 loss: 0.7112812399864197
Epoch 606 completed out of 1000 loss: 0.7112715244293213
Epoch 607 completed out of 1000 loss: 0.7112630009651184
Epoch 608 completed out of 1000 loss: 0.7112500071525574
Epoch 609 completed out of 1000 loss: 0.7112407088279724
Epoch 610 completed out of 1000 loss: 0.711229145526886
Epoch 611 completed out of 1000 loss: 0.7112197875976562
Epoch 612 completed out of 1000 loss: 0.7112101316452026
Epoch 613 completed out of 1000 loss: 0.7111989259719849
Epoch 614 completed out of 1000 loss: 0.7111877799034119
Epoch 615 completed out of 1000 loss: 0.7111769318580627
Epoch 616 completed out of 1000 loss: 0.7111681699752808
Epoch 617 completed out of 1000 loss: 0.7111577391624451
Epoch 618 completed out of 1000 loss: 0.711147129535675
Epoch 619 completed out of 1000 loss: 0.7111369967460632
Epoch 620 completed out of 1000 loss: 0.7111284136772156
Epoch 621 completed out of 1000 loss: 0.7111170887947083
Epoch 622 completed out of 1000 loss: 0.7111059427261353
Epoch 623 completed out of 1000 loss: 0.7110968828201294
Epoch 624 completed out of 1000 loss: 0.7110869288444519
Epoch 625 completed out of 1000 loss: 0.7110757231712341
Epoch 626 completed out of 1000 loss: 0.7110657691955566
Epoch 627 completed out of 1000 loss: 0.7110582590103149
Epoch 628 completed out of 1000 loss: 0.7110467553138733
Epoch 629 completed out of 1000 loss: 0.7110355496406555
Epoch 630 completed out of 1000 loss: 0.7110267877578735
Epoch 631 completed out of 1000 loss: 0.711018443107605
Epoch 632 completed out of 1000 loss: 0.7110068202018738
Epoch 633 completed out of 1000 loss: 0.7109966278076172
Epoch 634 completed out of 1000 loss: 0.7109875679016113
Epoch 635 completed out of 1000 loss: 0.7109773755073547
Epoch 636 completed out of 1000 loss: 0.7109666466712952
Epoch 637 completed out of 1000 loss: 0.7109591960906982
Epoch 638 completed out of 1000 loss: 0.7109470367431641
Epoch 639 completed out of 1000 loss: 0.7109365463256836
Epoch 640 completed out of 1000 loss: 0.7109272480010986
Epoch 641 completed out of 1000 loss: 0.7109186053276062
Epoch 642 completed out of 1000 loss: 0.7109076976776123
Epoch 643 completed out of 1000 loss: 0.7108988761901855
Epoch 644 completed out of 1000 loss: 0.7108895182609558
Epoch 645 completed out of 1000 loss: 0.7108776569366455
Epoch 646 completed out of 1000 loss: 0.7108689546585083
Epoch 647 completed out of 1000 loss: 0.7108614444732666
Epoch 648 completed out of 1000 loss: 0.7108488082885742
Epoch 649 completed out of 1000 loss: 0.7108399868011475
Epoch 650 completed out of 1000 loss: 0.7108320593833923
Epoch 651 completed out of 1000 loss: 0.710818886756897
Epoch 652 completed out of 1000 loss: 0.7108103036880493
Epoch 653 completed out of 1000 loss: 0.7108002305030823
Epoch 654 completed out of 1000 loss: 0.7107903957366943
Epoch 655 completed out of 1000 loss: 0.7107829451560974
Epoch 656 completed out of 1000 loss: 0.7107717394828796
Epoch 657 completed out of 1000 loss: 0.7107622027397156
Epoch 658 completed out of 1000 loss: 0.7107530832290649
Epoch 659 completed out of 1000 loss: 0.7107423543930054
Epoch 660 completed out of 1000 loss: 0.7107329368591309
Epoch 661 completed out of 1000 loss: 0.7107251286506653
Epoch 662 completed out of 1000 loss: 0.7107122540473938
Epoch 663 completed out of 1000 loss: 0.7107046842575073
Epoch 664 completed out of 1000 loss: 0.7106943130493164
Epoch 665 completed out of 1000 loss: 0.7106844782829285
Epoch 666 completed out of 1000 loss: 0.7106763124465942
Epoch 667 completed out of 1000 loss: 0.7106652855873108
Epoch 668 completed out of 1000 loss: 0.7106554508209229
Epoch 669 completed out of 1000 loss: 0.7106472253799438
Epoch 670 completed out of 1000 loss: 0.7106353640556335
Epoch 671 completed out of 1000 loss: 0.7106274962425232
Epoch 672 completed out of 1000 loss: 0.7106170058250427
Epoch 673 completed out of 1000 loss: 0.7106074690818787
Epoch 674 completed out of 1000 loss: 0.7105993628501892
Epoch 675 completed out of 1000 loss: 0.7105872631072998
Epoch 676 completed out of 1000 loss: 0.710578978061676
Epoch 677 completed out of 1000 loss: 0.7105677723884583
Epoch 678 completed out of 1000 loss: 0.7105585336685181
Epoch 679 completed out of 1000 loss: 0.7105507254600525
Epoch 680 completed out of 1000 loss: 0.7105382084846497
Epoch 681 completed out of 1000 loss: 0.7105316519737244
Epoch 682 completed out of 1000 loss: 0.7105204463005066
Epoch 683 completed out of 1000 loss: 0.7105106711387634
Epoch 684 completed out of 1000 loss: 0.7105011940002441
Epoch 685 completed out of 1000 loss: 0.7104918956756592
Epoch 686 completed out of 1000 loss: 0.7104845643043518
Epoch 687 completed out of 1000 loss: 0.7104709148406982
Epoch 688 completed out of 1000 loss: 0.7104642391204834
Epoch 689 completed out of 1000 loss: 0.7104526162147522
Epoch 690 completed out of 1000 loss: 0.7104430794715881
Epoch 691 completed out of 1000 loss: 0.7104337215423584
Epoch 692 completed out of 1000 loss: 0.7104241251945496
Epoch 693 completed out of 1000 loss: 0.7104165554046631
Epoch 694 completed out of 1000 loss: 0.7104043364524841
Epoch 695 completed out of 1000 loss: 0.7103962898254395
Epoch 696 completed out of 1000 loss: 0.7103850245475769
Epoch 697 completed out of 1000 loss: 0.7103755474090576
Epoch 698 completed out of 1000 loss: 0.7103660106658936
Epoch 699 completed out of 1000 loss: 0.710355818271637
Epoch 700 completed out of 1000 loss: 0.7103488445281982
Epoch 701 completed out of 1000 loss: 0.7103352546691895
Epoch 702 completed out of 1000 loss: 0.710328996181488
Epoch 703 completed out of 1000 loss: 0.7103169560432434
Epoch 704 completed out of 1000 loss: 0.7103079557418823
Epoch 705 completed out of 1000 loss: 0.7102984189987183
Epoch 706 completed out of 1000 loss: 0.7102881073951721
Epoch 707 completed out of 1000 loss: 0.7102811932563782
Epoch 708 completed out of 1000 loss: 0.7102686762809753
Epoch 709 completed out of 1000 loss: 0.7102615237236023
Epoch 710 completed out of 1000 loss: 0.7102500796318054
Epoch 711 completed out of 1000 loss: 0.7102425694465637
Epoch 712 completed out of 1000 loss: 0.7102310657501221
Epoch 713 completed out of 1000 loss: 0.7102206349372864
Epoch 714 completed out of 1000 loss: 0.7102123498916626
Epoch 715 completed out of 1000 loss: 0.7102012038230896
Epoch 716 completed out of 1000 loss: 0.7101926207542419
Epoch 717 completed out of 1000 loss: 0.7101819515228271
Epoch 718 completed out of 1000 loss: 0.7101731300354004
Epoch 719 completed out of 1000 loss: 0.7101624608039856
Epoch 720 completed out of 1000 loss: 0.7101550102233887
Epoch 721 completed out of 1000 loss: 0.7101452946662903
Epoch 722 completed out of 1000 loss: 0.7101323008537292
Epoch 723 completed out of 1000 loss: 0.7101249694824219
Epoch 724 completed out of 1000 loss: 0.7101122736930847
Epoch 725 completed out of 1000 loss: 0.7101059556007385
Epoch 726 completed out of 1000 loss: 0.7100939154624939
Epoch 727 completed out of 1000 loss: 0.7100861072540283
Epoch 728 completed out of 1000 loss: 0.7100765109062195
Epoch 729 completed out of 1000 loss: 0.7100645899772644
Epoch 730 completed out of 1000 loss: 0.7100567817687988
Epoch 731 completed out of 1000 loss: 0.7100439667701721
Epoch 732 completed out of 1000 loss: 0.7100360989570618
Epoch 733 completed out of 1000 loss: 0.7100248336791992
Epoch 734 completed out of 1000 loss: 0.7100167274475098
Epoch 735 completed out of 1000 loss: 0.7100066542625427
Epoch 736 completed out of 1000 loss: 0.7099947333335876
Epoch 737 completed out of 1000 loss: 0.7099872827529907
Epoch 738 completed out of 1000 loss: 0.7099746465682983
Epoch 739 completed out of 1000 loss: 0.7099670171737671
Epoch 740 completed out of 1000 loss: 0.7099543809890747
Epoch 741 completed out of 1000 loss: 0.7099460363388062
Epoch 742 completed out of 1000 loss: 0.7099370956420898
Epoch 743 completed out of 1000 loss: 0.7099247574806213
Epoch 744 completed out of 1000 loss: 0.7099165320396423
Epoch 745 completed out of 1000 loss: 0.7099054455757141
Epoch 746 completed out of 1000 loss: 0.7098973393440247
Epoch 747 completed out of 1000 loss: 0.7098854780197144
Epoch 748 completed out of 1000 loss: 0.7098767757415771
Epoch 749 completed out of 1000 loss: 0.7098684906959534
Epoch 750 completed out of 1000 loss: 0.7098572850227356
Epoch 751 completed out of 1000 loss: 0.7098481059074402
Epoch 752 completed out of 1000 loss: 0.7098374366760254
Epoch 753 completed out of 1000 loss: 0.7098280787467957
Epoch 754 completed out of 1000 loss: 0.7098190188407898
Epoch 755 completed out of 1000 loss: 0.7098085284233093
Epoch 756 completed out of 1000 loss: 0.7097989916801453
Epoch 757 completed out of 1000 loss: 0.7097883820533752
Epoch 758 completed out of 1000 loss: 0.7097789645195007
Epoch 759 completed out of 1000 loss: 0.7097692489624023
Epoch 760 completed out of 1000 loss: 0.7097601294517517
Epoch 761 completed out of 1000 loss: 0.7097504734992981
Epoch 762 completed out of 1000 loss: 0.7097386121749878
Epoch 763 completed out of 1000 loss: 0.7097306847572327
Epoch 764 completed out of 1000 loss: 0.7097195982933044
Epoch 765 completed out of 1000 loss: 0.7097117304801941
Epoch 766 completed out of 1000 loss: 0.7097002863883972
Epoch 767 completed out of 1000 loss: 0.7096922397613525
Epoch 768 completed out of 1000 loss: 0.7096812129020691
Epoch 769 completed out of 1000 loss: 0.7096707224845886
Epoch 770 completed out of 1000 loss: 0.7096627950668335
Epoch 771 completed out of 1000 loss: 0.709650993347168
Epoch 772 completed out of 1000 loss: 0.7096449732780457
Epoch 773 completed out of 1000 loss: 0.7096324563026428
Epoch 774 completed out of 1000 loss: 0.7096248269081116
Epoch 775 completed out of 1000 loss: 0.7096132040023804
Epoch 776 completed out of 1000 loss: 0.7096063494682312
Epoch 777 completed out of 1000 loss: 0.7095938324928284
Epoch 778 completed out of 1000 loss: 0.7095862627029419
Epoch 779 completed out of 1000 loss: 0.7095746994018555
Epoch 780 completed out of 1000 loss: 0.7095670700073242
Epoch 781 completed out of 1000 loss: 0.7095580101013184
Epoch 782 completed out of 1000 loss: 0.7095475792884827
Epoch 783 completed out of 1000 loss: 0.7095384001731873
Epoch 784 completed out of 1000 loss: 0.7095299959182739
Epoch 785 completed out of 1000 loss: 0.7095192670822144
Epoch 786 completed out of 1000 loss: 0.7095105051994324
Epoch 787 completed out of 1000 loss: 0.7095015048980713
Epoch 788 completed out of 1000 loss: 0.7094922065734863
Epoch 789 completed out of 1000 loss: 0.7094814777374268
Epoch 790 completed out of 1000 loss: 0.709473729133606
Epoch 791 completed out of 1000 loss: 0.7094643115997314
Epoch 792 completed out of 1000 loss: 0.7094528079032898
Epoch 793 completed out of 1000 loss: 0.7094470858573914
Epoch 794 completed out of 1000 loss: 0.7094345092773438
Epoch 795 completed out of 1000 loss: 0.7094287872314453
Epoch 796 completed out of 1000 loss: 0.7094171643257141
Epoch 797 completed out of 1000 loss: 0.7094091773033142
Epoch 798 completed out of 1000 loss: 0.7093988060951233
Epoch 799 completed out of 1000 loss: 0.709391713142395
Epoch 800 completed out of 1000 loss: 0.7093800902366638
Epoch 801 completed out of 1000 loss: 0.7093735933303833
Epoch 802 completed out of 1000 loss: 0.7093629240989685
Epoch 803 completed out of 1000 loss: 0.7093536257743835
Epoch 804 completed out of 1000 loss: 0.7093452215194702
Epoch 805 completed out of 1000 loss: 0.7093361020088196
Epoch 806 completed out of 1000 loss: 0.7093260288238525
Epoch 807 completed out of 1000 loss: 0.709318995475769
Epoch 808 completed out of 1000 loss: 0.7093102335929871
Epoch 809 completed out of 1000 loss: 0.7092978954315186
Epoch 810 completed out of 1000 loss: 0.7092923521995544
Epoch 811 completed out of 1000 loss: 0.7092803716659546
Epoch 812 completed out of 1000 loss: 0.7092735171318054
Epoch 813 completed out of 1000 loss: 0.709264874458313
Epoch 814 completed out of 1000 loss: 0.7092549204826355
Epoch 815 completed out of 1000 loss: 0.7092459797859192
Epoch 816 completed out of 1000 loss: 0.7092374563217163
Epoch 817 completed out of 1000 loss: 0.709229052066803
Epoch 818 completed out of 1000 loss: 0.7092169523239136
Epoch 819 completed out of 1000 loss: 0.7092104554176331
Epoch 820 completed out of 1000 loss: 0.7092009782791138
Epoch 821 completed out of 1000 loss: 0.709191620349884
Epoch 822 completed out of 1000 loss: 0.7091837525367737
Epoch 823 completed out of 1000 loss: 0.709174394607544
Epoch 824 completed out of 1000 loss: 0.7091652154922485
Epoch 825 completed out of 1000 loss: 0.7091542482376099
Epoch 826 completed out of 1000 loss: 0.709147572517395
Epoch 827 completed out of 1000 loss: 0.7091392874717712
Epoch 828 completed out of 1000 loss: 0.7091298699378967
Epoch 829 completed out of 1000 loss: 0.7091207504272461
Epoch 830 completed out of 1000 loss: 0.7091110944747925
Epoch 831 completed out of 1000 loss: 0.709102213382721
Epoch 832 completed out of 1000 loss: 0.7090935707092285
Epoch 833 completed out of 1000 loss: 0.709083616733551
Epoch 834 completed out of 1000 loss: 0.7090739607810974
Epoch 835 completed out of 1000 loss: 0.7090645432472229
Epoch 836 completed out of 1000 loss: 0.7090525031089783
Epoch 837 completed out of 1000 loss: 0.7090429067611694
Epoch 838 completed out of 1000 loss: 0.7090303897857666
Epoch 839 completed out of 1000 loss: 0.7090203166007996
Epoch 840 completed out of 1000 loss: 0.709008514881134
Epoch 841 completed out of 1000 loss: 0.70899897813797
Epoch 842 completed out of 1000 loss: 0.7089893817901611
Epoch 843 completed out of 1000 loss: 0.7089798450469971
Epoch 844 completed out of 1000 loss: 0.7089705467224121
Epoch 845 completed out of 1000 loss: 0.7089613676071167
Epoch 846 completed out of 1000 loss: 0.7089515924453735
Epoch 847 completed out of 1000 loss: 0.7089425921440125
Epoch 848 completed out of 1000 loss: 0.708933413028717
Epoch 849 completed out of 1000 loss: 0.7089232802391052
Epoch 850 completed out of 1000 loss: 0.7089152932167053
Epoch 851 completed out of 1000 loss: 0.7089047431945801
Epoch 852 completed out of 1000 loss: 0.7088959217071533
Epoch 853 completed out of 1000 loss: 0.7088857293128967
Epoch 854 completed out of 1000 loss: 0.7088779807090759
Epoch 855 completed out of 1000 loss: 0.7088688611984253
Epoch 856 completed out of 1000 loss: 0.7088605761528015
Epoch 857 completed out of 1000 loss: 0.7088502645492554
Epoch 858 completed out of 1000 loss: 0.7088422775268555
Epoch 859 completed out of 1000 loss: 0.7088339328765869
Epoch 860 completed out of 1000 loss: 0.7088230848312378
Epoch 861 completed out of 1000 loss: 0.7088122367858887
Epoch 862 completed out of 1000 loss: 0.7088019847869873
Epoch 863 completed out of 1000 loss: 0.7087917923927307
Epoch 864 completed out of 1000 loss: 0.7087838053703308
Epoch 865 completed out of 1000 loss: 0.708772599697113
Epoch 866 completed out of 1000 loss: 0.708764910697937
Epoch 867 completed out of 1000 loss: 0.7087568640708923
Epoch 868 completed out of 1000 loss: 0.7087481617927551
Epoch 869 completed out of 1000 loss: 0.7087405323982239
Epoch 870 completed out of 1000 loss: 0.7087312340736389
Epoch 871 completed out of 1000 loss: 0.7087251543998718
Epoch 872 completed out of 1000 loss: 0.708715558052063
Epoch 873 completed out of 1000 loss: 0.7087085247039795
Epoch 874 completed out of 1000 loss: 0.7086998820304871
Epoch 875 completed out of 1000 loss: 0.7086912393569946
Epoch 876 completed out of 1000 loss: 0.7086824178695679
Epoch 877 completed out of 1000 loss: 0.7086741924285889
Epoch 878 completed out of 1000 loss: 0.7086664438247681
Epoch 879 completed out of 1000 loss: 0.708655834197998
Epoch 880 completed out of 1000 loss: 0.7086474299430847
Epoch 881 completed out of 1000 loss: 0.7086388468742371
Epoch 882 completed out of 1000 loss: 0.7086312174797058
Epoch 883 completed out of 1000 loss: 0.7086232900619507
Epoch 884 completed out of 1000 loss: 0.7086154818534851
Epoch 885 completed out of 1000 loss: 0.7086061239242554
Epoch 886 completed out of 1000 loss: 0.7085995078086853
Epoch 887 completed out of 1000 loss: 0.7085887789726257
Epoch 888 completed out of 1000 loss: 0.7085812091827393
Epoch 889 completed out of 1000 loss: 0.7085728049278259
Epoch 890 completed out of 1000 loss: 0.7085641026496887
Epoch 891 completed out of 1000 loss: 0.7085569500923157
Epoch 892 completed out of 1000 loss: 0.7085479497909546
Epoch 893 completed out of 1000 loss: 0.7085413932800293
Epoch 894 completed out of 1000 loss: 0.7085320353507996
Epoch 895 completed out of 1000 loss: 0.7085255980491638
Epoch 896 completed out of 1000 loss: 0.7085157036781311
Epoch 897 completed out of 1000 loss: 0.7085094451904297
Epoch 898 completed out of 1000 loss: 0.7084996700286865
Epoch 899 completed out of 1000 loss: 0.7084912657737732
Epoch 900 completed out of 1000 loss: 0.7084853053092957
Epoch 901 completed out of 1000 loss: 0.7084754109382629
Epoch 902 completed out of 1000 loss: 0.7084691524505615
Epoch 903 completed out of 1000 loss: 0.7084593176841736
Epoch 904 completed out of 1000 loss: 0.7084528803825378
Epoch 905 completed out of 1000 loss: 0.7084437608718872
Epoch 906 completed out of 1000 loss: 0.7084363698959351
Epoch 907 completed out of 1000 loss: 0.7084272503852844
Epoch 908 completed out of 1000 loss: 0.7084203362464905
Epoch 909 completed out of 1000 loss: 0.7084125280380249
Epoch 910 completed out of 1000 loss: 0.7084025740623474
Epoch 911 completed out of 1000 loss: 0.7083956003189087
Epoch 912 completed out of 1000 loss: 0.7083860039710999
Epoch 913 completed out of 1000 loss: 0.7083795666694641
Epoch 914 completed out of 1000 loss: 0.7083714604377747
Epoch 915 completed out of 1000 loss: 0.7083612084388733
Epoch 916 completed out of 1000 loss: 0.7083560228347778
Epoch 917 completed out of 1000 loss: 0.7083450555801392
Epoch 918 completed out of 1000 loss: 0.7083379626274109
Epoch 919 completed out of 1000 loss: 0.7083304524421692
Epoch 920 completed out of 1000 loss: 0.7083205580711365
Epoch 921 completed out of 1000 loss: 0.7083131670951843
Epoch 922 completed out of 1000 loss: 0.708301842212677
Epoch 923 completed out of 1000 loss: 0.708294689655304
Epoch 924 completed out of 1000 loss: 0.708285927772522
Epoch 925 completed out of 1000 loss: 0.7082760334014893
Epoch 926 completed out of 1000 loss: 0.7082712650299072
Epoch 927 completed out of 1000 loss: 0.708261251449585
Epoch 928 completed out of 1000 loss: 0.7082564234733582
Epoch 929 completed out of 1000 loss: 0.7082480192184448
Epoch 930 completed out of 1000 loss: 0.7082415223121643
Epoch 931 completed out of 1000 loss: 0.7082341909408569
Epoch 932 completed out of 1000 loss: 0.7082250118255615
Epoch 933 completed out of 1000 loss: 0.7082175016403198
Epoch 934 completed out of 1000 loss: 0.7082077860832214
Epoch 935 completed out of 1000 loss: 0.7082023024559021
Epoch 936 completed out of 1000 loss: 0.7081918716430664
Epoch 937 completed out of 1000 loss: 0.7081865668296814
Epoch 938 completed out of 1000 loss: 0.7081776857376099
Epoch 939 completed out of 1000 loss: 0.7081714868545532
Epoch 940 completed out of 1000 loss: 0.7081639766693115
Epoch 941 completed out of 1000 loss: 0.7081565260887146
Epoch 942 completed out of 1000 loss: 0.7081499695777893
Epoch 943 completed out of 1000 loss: 0.7081425189971924
Epoch 944 completed out of 1000 loss: 0.7081336975097656
Epoch 945 completed out of 1000 loss: 0.7081267833709717
Epoch 946 completed out of 1000 loss: 0.7081180214881897
Epoch 947 completed out of 1000 loss: 0.7081114053726196
Epoch 948 completed out of 1000 loss: 0.7081043124198914
Epoch 949 completed out of 1000 loss: 0.7080957889556885
Epoch 950 completed out of 1000 loss: 0.7080895304679871
Epoch 951 completed out of 1000 loss: 0.7080831527709961
Epoch 952 completed out of 1000 loss: 0.7080751657485962
Epoch 953 completed out of 1000 loss: 0.7080678939819336
Epoch 954 completed out of 1000 loss: 0.708060622215271
Epoch 955 completed out of 1000 loss: 0.7080520391464233
Epoch 956 completed out of 1000 loss: 0.7080463171005249
Epoch 957 completed out of 1000 loss: 0.7080376744270325
Epoch 958 completed out of 1000 loss: 0.7080320715904236
Epoch 959 completed out of 1000 loss: 0.7080243825912476
Epoch 960 completed out of 1000 loss: 0.708016574382782
Epoch 961 completed out of 1000 loss: 0.7080098390579224
Epoch 962 completed out of 1000 loss: 0.7080015540122986
Epoch 963 completed out of 1000 loss: 0.7079923748970032
Epoch 964 completed out of 1000 loss: 0.7079875469207764
Epoch 965 completed out of 1000 loss: 0.7079795598983765
Epoch 966 completed out of 1000 loss: 0.7079721689224243
Epoch 967 completed out of 1000 loss: 0.7079657912254333
Epoch 968 completed out of 1000 loss: 0.7079585194587708
Epoch 969 completed out of 1000 loss: 0.7079508900642395
Epoch 970 completed out of 1000 loss: 0.7079433798789978
Epoch 971 completed out of 1000 loss: 0.7079371213912964
Epoch 972 completed out of 1000 loss: 0.7079287171363831
Epoch 973 completed out of 1000 loss: 0.7079219222068787
Epoch 974 completed out of 1000 loss: 0.7079150080680847
Epoch 975 completed out of 1000 loss: 0.7079076766967773
Epoch 976 completed out of 1000 loss: 0.7079002261161804
Epoch 977 completed out of 1000 loss: 0.7078927159309387
Epoch 978 completed out of 1000 loss: 0.7078859210014343
Epoch 979 completed out of 1000 loss: 0.7078791260719299
Epoch 980 completed out of 1000 loss: 0.7078721523284912
Epoch 981 completed out of 1000 loss: 0.7078635692596436
Epoch 982 completed out of 1000 loss: 0.707857072353363
Epoch 983 completed out of 1000 loss: 0.707852303981781
Epoch 984 completed out of 1000 loss: 0.7078424096107483
Epoch 985 completed out of 1000 loss: 0.7078364491462708
Epoch 986 completed out of 1000 loss: 0.7078282833099365
Epoch 987 completed out of 1000 loss: 0.7078211903572083
Epoch 988 completed out of 1000 loss: 0.707815945148468
Epoch 989 completed out of 1000 loss: 0.7078058123588562
Epoch 990 completed out of 1000 loss: 0.7078005075454712
Epoch 991 completed out of 1000 loss: 0.7077937722206116
Epoch 992 completed out of 1000 loss: 0.7077857851982117
Epoch 993 completed out of 1000 loss: 0.7077799439430237
Epoch 994 completed out of 1000 loss: 0.707771897315979
Epoch 995 completed out of 1000 loss: 0.7077667117118835
Epoch 996 completed out of 1000 loss: 0.7077581882476807
Epoch 997 completed out of 1000 loss: 0.7077509164810181
Epoch 998 completed out of 1000 loss: 0.7077450156211853
Epoch 999 completed out of 1000 loss: 0.7077359557151794
Tensor("Softmax:0", shape=(?, 2), dtype=float32)
accuracy on train: 0.5337489
Accuracy on test : 0.53982955
In [109]:
predy=np.argmax(predy,axis=1)
cm8 = confusion_matrix(df_ya,predy)
plt.figure(figsize = (10,7))
cm8df = pd.DataFrame(cm8, index = ["1",'0'], columns = ["1",'0'])
seaborn.set(font_scale=1.4)#for label size
seaborn.heatmap(cm8df, annot=True,annot_kws={"size": 16})# font size
Out[109]:
<matplotlib.axes._subplots.AxesSubplot at 0x20f0bcf8cc8>
In [110]:
fpr, tpr, _ = roc_curve(predy, df_ya, drop_intermediate=False)
plt.plot(fpr, tpr, color='red')
plt.xlabel('fpr')
plt.ylabel('tpr')
plt.title('ROC curve fr Tensorflow 1')
plt.show()
In [111]:
seq = np.arange(1000).tolist()
plt.plot(seq, losslist)
plt.show()
In [112]:
#TensorFlow MLP

n_nodes_hl1 =30
n_nodes_hl2=45
n_nodes_hl3=10
n_nodes_hl4=8
n_classes = 2

x = tf.placeholder('float', [None, 15])
y = tf.placeholder('float',[None,2])

def neuralnetwork2(data):
    hiddenlayer1 = {'weights':tf.Variable(tf.random_normal([15, n_nodes_hl1])), 'biases':tf.Variable(tf.random_normal([n_nodes_hl1]))}
    hiddenlayer2 = {'weights':tf.Variable(tf.random_normal([n_nodes_hl1, n_nodes_hl2])), 'biases':tf.Variable(tf.random_normal([n_nodes_hl2]))}
#     hiddenlayer3 = {'weights':tf.Variable(tf.random_normal([n_nodes_hl2, n_nodes_hl3])), 'biases':tf.Variable(tf.random_normal([n_nodes_hl3]))}
    # hiddenlayer4 = {'weights':tf.Variable(tf.random_normal([n_nodes_hl3, n_nodes_hl4])), 'biases':tf.Variable(tf.random_normal([n_nodes_hl4]))}
    outputlayer = {'weights':tf.Variable(tf.random_normal([n_nodes_hl2, n_classes])), 'biases':tf.Variable(tf.random_normal([n_classes])),}

    l1 = tf.add(tf.matmul(data,hiddenlayer1['weights']), hiddenlayer1['biases'])
    l1 = tf.nn.tanh(l1)
    l2 = tf.add(tf.matmul(l1,hiddenlayer2['weights']), hiddenlayer2['biases'])
    l2 = tf.nn.tanh(l2)
#     l3 = tf.add(tf.matmul(l2,hiddenlayer3['weights']), hiddenlayer3['biases'])
#     l3 = tf.nn.tanh(l3)
    # l4 = tf.add(tf.matmul(l3,hiddenlayer4['weights']), hiddenlayer4['biases'])
    # l4 = tf.nn.tanh(l4)

    output = tf.matmul(l2,outputlayer['weights']) + outputlayer['biases']
    output=tf.nn.softmax(output)

    return output
In [113]:
def train_neural_network2(x, dfx, dfy, dftestx, dftesty):
    prediction = neuralnetwork2(x)
    eploss=[]
    eplosstrain=[]
    cost = tf.reduce_mean( tf.nn.softmax_cross_entropy_with_logits(logits=prediction, labels=y ))
    
    optimizer = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(cost)
    # print(optimizer)
    
    epochs = 1000
    with tf.Session() as sess:
        # OLD:
        # sess.run(tf.initialize_all_variables())
        # NEW:
        sess.run(tf.global_variables_initializer())

        for epoch in range(epochs):
            epoch_loss = 0
            epoch_losstrain=0
            # for i in range(40613):
            #     epoch_x, epoch_y =dfbcde[i].reshape(1,-1),df_ybcde[i].reshape(1,-1) 
            #     # print(type((epoch_x)))
            #     lam, c = sess.run([optimizer, cost], feed_dict={x: (epoch_x), y: (epoch_y)})
            #     epoch_loss += c
            #     print(lam,c)
            for index in range(0,38867,10000):
                epoch_x, epoch_y =dfx[index:index+10000,:],dfy[index:index+10000,:]
                lam, c = sess.run([optimizer, cost], feed_dict={x: (epoch_x), y: (epoch_y)})
                # lam, c1 = sess.run([optimizer, cost], feed_dict={x: (dfa), y: (df_yaten)})
                epoch_loss += c
            eploss.append(epoch_loss)
  
            print('Epoch', epoch, 'completed out of',epochs,'loss:',epoch_loss)
        print(prediction)
        # confusion = tf.confusion_matrix(labels=y, predictions=prediction, num_classes=2)
        predy=prediction.eval({x:dftestx,y:dftesty})
        print(predy)
        print(np.array_equal(predy,dftesty))
        correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
        accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
        print("accuracy on train:",accuracy.eval({x:dfx,y:dfy}))
        print('Accuracy on test :',accuracy.eval({x:dftestx, y:dftesty}))
        # print(confusion.eval(session=sess))
        print(dftesty)
    return eploss, predy
In [114]:
losslist2, predy2 = train_neural_network2(x, dfacde, df_yacdeten, dfb, df_ybten)
predy2 = np.argmax(predy2, axis=1)
print(predy2)
Epoch 0 completed out of 1000 loss: 3.171965777873993
Epoch 1 completed out of 1000 loss: 3.168803870677948
Epoch 2 completed out of 1000 loss: 3.1660982966423035
Epoch 3 completed out of 1000 loss: 3.1637458205223083
Epoch 4 completed out of 1000 loss: 3.161473512649536
Epoch 5 completed out of 1000 loss: 3.159274160861969
Epoch 6 completed out of 1000 loss: 3.1570618748664856
Epoch 7 completed out of 1000 loss: 3.154956817626953
Epoch 8 completed out of 1000 loss: 3.1529464721679688
Epoch 9 completed out of 1000 loss: 3.1509217619895935
Epoch 10 completed out of 1000 loss: 3.148977518081665
Epoch 11 completed out of 1000 loss: 3.1471049189567566
Epoch 12 completed out of 1000 loss: 3.145249843597412
Epoch 13 completed out of 1000 loss: 3.1433736085891724
Epoch 14 completed out of 1000 loss: 3.1413976550102234
Epoch 15 completed out of 1000 loss: 3.139322578907013
Epoch 16 completed out of 1000 loss: 3.137369990348816
Epoch 17 completed out of 1000 loss: 3.1355563402175903
Epoch 18 completed out of 1000 loss: 3.133792817592621
Epoch 19 completed out of 1000 loss: 3.132047653198242
Epoch 20 completed out of 1000 loss: 3.130306303501129
Epoch 21 completed out of 1000 loss: 3.128575384616852
Epoch 22 completed out of 1000 loss: 3.1268333196640015
Epoch 23 completed out of 1000 loss: 3.1250946521759033
Epoch 24 completed out of 1000 loss: 3.1234130859375
Epoch 25 completed out of 1000 loss: 3.121786117553711
Epoch 26 completed out of 1000 loss: 3.1201661825180054
Epoch 27 completed out of 1000 loss: 3.1185463666915894
Epoch 28 completed out of 1000 loss: 3.116936147212982
Epoch 29 completed out of 1000 loss: 3.1153677105903625
Epoch 30 completed out of 1000 loss: 3.113852560520172
Epoch 31 completed out of 1000 loss: 3.1124236583709717
Epoch 32 completed out of 1000 loss: 3.1108798384666443
Epoch 33 completed out of 1000 loss: 3.109238386154175
Epoch 34 completed out of 1000 loss: 3.107829451560974
Epoch 35 completed out of 1000 loss: 3.1064311861991882
Epoch 36 completed out of 1000 loss: 3.104996144771576
Epoch 37 completed out of 1000 loss: 3.1036654114723206
Epoch 38 completed out of 1000 loss: 3.1024189591407776
Epoch 39 completed out of 1000 loss: 3.101180136203766
Epoch 40 completed out of 1000 loss: 3.099949598312378
Epoch 41 completed out of 1000 loss: 3.098723530769348
Epoch 42 completed out of 1000 loss: 3.097507357597351
Epoch 43 completed out of 1000 loss: 3.0962710976600647
Epoch 44 completed out of 1000 loss: 3.09499728679657
Epoch 45 completed out of 1000 loss: 3.0935768485069275
Epoch 46 completed out of 1000 loss: 3.092003583908081
Epoch 47 completed out of 1000 loss: 3.090606987476349
Epoch 48 completed out of 1000 loss: 3.089482605457306
Epoch 49 completed out of 1000 loss: 3.088249623775482
Epoch 50 completed out of 1000 loss: 3.086941719055176
Epoch 51 completed out of 1000 loss: 3.085596978664398
Epoch 52 completed out of 1000 loss: 3.0842649340629578
Epoch 53 completed out of 1000 loss: 3.0829505920410156
Epoch 54 completed out of 1000 loss: 3.0815796852111816
Epoch 55 completed out of 1000 loss: 3.0801661014556885
Epoch 56 completed out of 1000 loss: 3.0787249207496643
Epoch 57 completed out of 1000 loss: 3.0772330164909363
Epoch 58 completed out of 1000 loss: 3.0756804943084717
Epoch 59 completed out of 1000 loss: 3.0740699768066406
Epoch 60 completed out of 1000 loss: 3.0723859071731567
Epoch 61 completed out of 1000 loss: 3.0706112384796143
Epoch 62 completed out of 1000 loss: 3.068741798400879
Epoch 63 completed out of 1000 loss: 3.066764771938324
Epoch 64 completed out of 1000 loss: 3.064674735069275
Epoch 65 completed out of 1000 loss: 3.0624528527259827
Epoch 66 completed out of 1000 loss: 3.0600980520248413
Epoch 67 completed out of 1000 loss: 3.0576058626174927
Epoch 68 completed out of 1000 loss: 3.0549941658973694
Epoch 69 completed out of 1000 loss: 3.0522770285606384
Epoch 70 completed out of 1000 loss: 3.0494874715805054
Epoch 71 completed out of 1000 loss: 3.0466546416282654
Epoch 72 completed out of 1000 loss: 3.0438168048858643
Epoch 73 completed out of 1000 loss: 3.0410043597221375
Epoch 74 completed out of 1000 loss: 3.03825044631958
Epoch 75 completed out of 1000 loss: 3.0355626940727234
Epoch 76 completed out of 1000 loss: 3.0329431891441345
Epoch 77 completed out of 1000 loss: 3.0303881764411926
Epoch 78 completed out of 1000 loss: 3.0278900861740112
Epoch 79 completed out of 1000 loss: 3.025440752506256
Epoch 80 completed out of 1000 loss: 3.0230295658111572
Epoch 81 completed out of 1000 loss: 3.020641565322876
Epoch 82 completed out of 1000 loss: 3.0182610750198364
Epoch 83 completed out of 1000 loss: 3.015897214412689
Epoch 84 completed out of 1000 loss: 3.013542652130127
Epoch 85 completed out of 1000 loss: 3.0112146139144897
Epoch 86 completed out of 1000 loss: 3.0088940858840942
Epoch 87 completed out of 1000 loss: 3.006580948829651
Epoch 88 completed out of 1000 loss: 3.004263937473297
Epoch 89 completed out of 1000 loss: 3.001935839653015
Epoch 90 completed out of 1000 loss: 2.999576151371002
Epoch 91 completed out of 1000 loss: 2.9971505403518677
Epoch 92 completed out of 1000 loss: 2.994624435901642
Epoch 93 completed out of 1000 loss: 2.9919448494911194
Epoch 94 completed out of 1000 loss: 2.989057183265686
Epoch 95 completed out of 1000 loss: 2.985878586769104
Epoch 96 completed out of 1000 loss: 2.982326924800873
Epoch 97 completed out of 1000 loss: 2.978286862373352
Epoch 98 completed out of 1000 loss: 2.9736448526382446
Epoch 99 completed out of 1000 loss: 2.9682831168174744
Epoch 100 completed out of 1000 loss: 2.9620989561080933
Epoch 101 completed out of 1000 loss: 2.955052614212036
Epoch 102 completed out of 1000 loss: 2.9472163319587708
Epoch 103 completed out of 1000 loss: 2.9388359785079956
Epoch 104 completed out of 1000 loss: 2.9303089380264282
Epoch 105 completed out of 1000 loss: 2.9221307039260864
Epoch 106 completed out of 1000 loss: 2.9147399067878723
Epoch 107 completed out of 1000 loss: 2.9082614183425903
Epoch 108 completed out of 1000 loss: 2.90277361869812
Epoch 109 completed out of 1000 loss: 2.8983983993530273
Epoch 110 completed out of 1000 loss: 2.8949215412139893
Epoch 111 completed out of 1000 loss: 2.8918173909187317
Epoch 112 completed out of 1000 loss: 2.888687312602997
Epoch 113 completed out of 1000 loss: 2.8857303261756897
Epoch 114 completed out of 1000 loss: 2.8834293484687805
Epoch 115 completed out of 1000 loss: 2.881026804447174
Epoch 116 completed out of 1000 loss: 2.878696382045746
Epoch 117 completed out of 1000 loss: 2.8762820959091187
Epoch 118 completed out of 1000 loss: 2.874593138694763
Epoch 119 completed out of 1000 loss: 2.873262941837311
Epoch 120 completed out of 1000 loss: 2.8723923563957214
Epoch 121 completed out of 1000 loss: 2.87144011259079
Epoch 122 completed out of 1000 loss: 2.8703529834747314
Epoch 123 completed out of 1000 loss: 2.869382679462433
Epoch 124 completed out of 1000 loss: 2.8686837553977966
Epoch 125 completed out of 1000 loss: 2.8679492473602295
Epoch 126 completed out of 1000 loss: 2.867221236228943
Epoch 127 completed out of 1000 loss: 2.866579294204712
Epoch 128 completed out of 1000 loss: 2.8660247921943665
Epoch 129 completed out of 1000 loss: 2.8655059933662415
Epoch 130 completed out of 1000 loss: 2.864988684654236
Epoch 131 completed out of 1000 loss: 2.8644720315933228
Epoch 132 completed out of 1000 loss: 2.863947033882141
Epoch 133 completed out of 1000 loss: 2.8634990453720093
Epoch 134 completed out of 1000 loss: 2.8630942702293396
Epoch 135 completed out of 1000 loss: 2.862691342830658
Epoch 136 completed out of 1000 loss: 2.8623088598251343
Epoch 137 completed out of 1000 loss: 2.8619465827941895
Epoch 138 completed out of 1000 loss: 2.8615880608558655
Epoch 139 completed out of 1000 loss: 2.86124050617218
Epoch 140 completed out of 1000 loss: 2.86088889837265
Epoch 141 completed out of 1000 loss: 2.8605388402938843
Epoch 142 completed out of 1000 loss: 2.860206961631775
Epoch 143 completed out of 1000 loss: 2.859870970249176
Epoch 144 completed out of 1000 loss: 2.8595147132873535
Epoch 145 completed out of 1000 loss: 2.859149694442749
Epoch 146 completed out of 1000 loss: 2.858791410923004
Epoch 147 completed out of 1000 loss: 2.8584328293800354
Epoch 148 completed out of 1000 loss: 2.8580653071403503
Epoch 149 completed out of 1000 loss: 2.857674241065979
Epoch 150 completed out of 1000 loss: 2.857285439968109
Epoch 151 completed out of 1000 loss: 2.8569690585136414
Epoch 152 completed out of 1000 loss: 2.856652617454529
Epoch 153 completed out of 1000 loss: 2.856274902820587
Epoch 154 completed out of 1000 loss: 2.855824112892151
Epoch 155 completed out of 1000 loss: 2.855371832847595
Epoch 156 completed out of 1000 loss: 2.855004668235779
Epoch 157 completed out of 1000 loss: 2.854607045650482
Epoch 158 completed out of 1000 loss: 2.854142665863037
Epoch 159 completed out of 1000 loss: 2.853667140007019
Epoch 160 completed out of 1000 loss: 2.853114902973175
Epoch 161 completed out of 1000 loss: 2.852542459964752
Epoch 162 completed out of 1000 loss: 2.8521097898483276
Epoch 163 completed out of 1000 loss: 2.851576566696167
Epoch 164 completed out of 1000 loss: 2.851071357727051
Epoch 165 completed out of 1000 loss: 2.850785732269287
Epoch 166 completed out of 1000 loss: 2.8504011631011963
Epoch 167 completed out of 1000 loss: 2.8500260710716248
Epoch 168 completed out of 1000 loss: 2.8496264815330505
Epoch 169 completed out of 1000 loss: 2.849288582801819
Epoch 170 completed out of 1000 loss: 2.849038064479828
Epoch 171 completed out of 1000 loss: 2.8487401604652405
Epoch 172 completed out of 1000 loss: 2.848420739173889
Epoch 173 completed out of 1000 loss: 2.848123848438263
Epoch 174 completed out of 1000 loss: 2.8478298783302307
Epoch 175 completed out of 1000 loss: 2.84751695394516
Epoch 176 completed out of 1000 loss: 2.8472121357917786
Epoch 177 completed out of 1000 loss: 2.846919357776642
Epoch 178 completed out of 1000 loss: 2.8466166853904724
Epoch 179 completed out of 1000 loss: 2.846309721469879
Epoch 180 completed out of 1000 loss: 2.84599906206131
Epoch 181 completed out of 1000 loss: 2.8456820249557495
Epoch 182 completed out of 1000 loss: 2.8453604578971863
Epoch 183 completed out of 1000 loss: 2.8450408577919006
Epoch 184 completed out of 1000 loss: 2.844719350337982
Epoch 185 completed out of 1000 loss: 2.8443942070007324
Epoch 186 completed out of 1000 loss: 2.8440625071525574
Epoch 187 completed out of 1000 loss: 2.8437203764915466
Epoch 188 completed out of 1000 loss: 2.8433696031570435
Epoch 189 completed out of 1000 loss: 2.8430256843566895
Epoch 190 completed out of 1000 loss: 2.8426785469055176
Epoch 191 completed out of 1000 loss: 2.842332363128662
Epoch 192 completed out of 1000 loss: 2.841986894607544
Epoch 193 completed out of 1000 loss: 2.8416473865509033
Epoch 194 completed out of 1000 loss: 2.8413087725639343
Epoch 195 completed out of 1000 loss: 2.840970277786255
Epoch 196 completed out of 1000 loss: 2.840629816055298
Epoch 197 completed out of 1000 loss: 2.8402870297431946
Epoch 198 completed out of 1000 loss: 2.839939594268799
Epoch 199 completed out of 1000 loss: 2.8395919799804688
Epoch 200 completed out of 1000 loss: 2.839236080646515
Epoch 201 completed out of 1000 loss: 2.8388792276382446
Epoch 202 completed out of 1000 loss: 2.838515818119049
Epoch 203 completed out of 1000 loss: 2.838148355484009
Epoch 204 completed out of 1000 loss: 2.8377765417099
Epoch 205 completed out of 1000 loss: 2.8374013900756836
Epoch 206 completed out of 1000 loss: 2.8370203375816345
Epoch 207 completed out of 1000 loss: 2.8366382718086243
Epoch 208 completed out of 1000 loss: 2.836251676082611
Epoch 209 completed out of 1000 loss: 2.835860252380371
Epoch 210 completed out of 1000 loss: 2.8354703783988953
Epoch 211 completed out of 1000 loss: 2.83507764339447
Epoch 212 completed out of 1000 loss: 2.834683358669281
Epoch 213 completed out of 1000 loss: 2.8342889547348022
Epoch 214 completed out of 1000 loss: 2.83389550447464
Epoch 215 completed out of 1000 loss: 2.8335022926330566
Epoch 216 completed out of 1000 loss: 2.833112418651581
Epoch 217 completed out of 1000 loss: 2.832725703716278
Epoch 218 completed out of 1000 loss: 2.832342505455017
Epoch 219 completed out of 1000 loss: 2.831961750984192
Epoch 220 completed out of 1000 loss: 2.831588566303253
Epoch 221 completed out of 1000 loss: 2.8312230110168457
Epoch 222 completed out of 1000 loss: 2.8308604955673218
Epoch 223 completed out of 1000 loss: 2.830503463745117
Epoch 224 completed out of 1000 loss: 2.8301549553871155
Epoch 225 completed out of 1000 loss: 2.8298139572143555
Epoch 226 completed out of 1000 loss: 2.8294743299484253
Epoch 227 completed out of 1000 loss: 2.8291401267051697
Epoch 228 completed out of 1000 loss: 2.828808307647705
Epoch 229 completed out of 1000 loss: 2.8284741044044495
Epoch 230 completed out of 1000 loss: 2.828132748603821
Epoch 231 completed out of 1000 loss: 2.827789783477783
Epoch 232 completed out of 1000 loss: 2.8274616599082947
Epoch 233 completed out of 1000 loss: 2.8271527886390686
Epoch 234 completed out of 1000 loss: 2.8268595337867737
Epoch 235 completed out of 1000 loss: 2.82657653093338
Epoch 236 completed out of 1000 loss: 2.8263020515441895
Epoch 237 completed out of 1000 loss: 2.826033294200897
Epoch 238 completed out of 1000 loss: 2.825771749019623
Epoch 239 completed out of 1000 loss: 2.825514793395996
Epoch 240 completed out of 1000 loss: 2.825260579586029
Epoch 241 completed out of 1000 loss: 2.8250163793563843
Epoch 242 completed out of 1000 loss: 2.8247708678245544
Epoch 243 completed out of 1000 loss: 2.824526071548462
Epoch 244 completed out of 1000 loss: 2.8242805004119873
Epoch 245 completed out of 1000 loss: 2.8240415453910828
Epoch 246 completed out of 1000 loss: 2.8238070011138916
Epoch 247 completed out of 1000 loss: 2.823574423789978
Epoch 248 completed out of 1000 loss: 2.823349952697754
Epoch 249 completed out of 1000 loss: 2.82312935590744
Epoch 250 completed out of 1000 loss: 2.822906792163849
Epoch 251 completed out of 1000 loss: 2.822681427001953
Epoch 252 completed out of 1000 loss: 2.822462797164917
Epoch 253 completed out of 1000 loss: 2.8222402930259705
Epoch 254 completed out of 1000 loss: 2.8220183849334717
Epoch 255 completed out of 1000 loss: 2.821801245212555
Epoch 256 completed out of 1000 loss: 2.821583867073059
Epoch 257 completed out of 1000 loss: 2.821363866329193
Epoch 258 completed out of 1000 loss: 2.8211416006088257
Epoch 259 completed out of 1000 loss: 2.8209117650985718
Epoch 260 completed out of 1000 loss: 2.8206816911697388
Epoch 261 completed out of 1000 loss: 2.820450246334076
Epoch 262 completed out of 1000 loss: 2.82019579410553
Epoch 263 completed out of 1000 loss: 2.8198861479759216
Epoch 264 completed out of 1000 loss: 2.8195059299468994
Epoch 265 completed out of 1000 loss: 2.8191170692443848
Epoch 266 completed out of 1000 loss: 2.8187100291252136
Epoch 267 completed out of 1000 loss: 2.8183279633522034
Epoch 268 completed out of 1000 loss: 2.817923069000244
Epoch 269 completed out of 1000 loss: 2.81742525100708
Epoch 270 completed out of 1000 loss: 2.816885232925415
Epoch 271 completed out of 1000 loss: 2.816330373287201
Epoch 272 completed out of 1000 loss: 2.8156405091285706
Epoch 273 completed out of 1000 loss: 2.8150212168693542
Epoch 274 completed out of 1000 loss: 2.8148301243782043
Epoch 275 completed out of 1000 loss: 2.8145967721939087
Epoch 276 completed out of 1000 loss: 2.8142848014831543
Epoch 277 completed out of 1000 loss: 2.813973069190979
Epoch 278 completed out of 1000 loss: 2.8136855959892273
Epoch 279 completed out of 1000 loss: 2.8133277893066406
Epoch 280 completed out of 1000 loss: 2.8128445148468018
Epoch 281 completed out of 1000 loss: 2.812427520751953
Epoch 282 completed out of 1000 loss: 2.812313735485077
Epoch 283 completed out of 1000 loss: 2.8122037649154663
Epoch 284 completed out of 1000 loss: 2.812052309513092
Epoch 285 completed out of 1000 loss: 2.811869263648987
Epoch 286 completed out of 1000 loss: 2.8116832971572876
Epoch 287 completed out of 1000 loss: 2.811509668827057
Epoch 288 completed out of 1000 loss: 2.811353862285614
Epoch 289 completed out of 1000 loss: 2.8111959099769592
Epoch 290 completed out of 1000 loss: 2.811031758785248
Epoch 291 completed out of 1000 loss: 2.810864508152008
Epoch 292 completed out of 1000 loss: 2.8106976747512817
Epoch 293 completed out of 1000 loss: 2.8105348944664
Epoch 294 completed out of 1000 loss: 2.810373544692993
Epoch 295 completed out of 1000 loss: 2.8102161288261414
Epoch 296 completed out of 1000 loss: 2.810063898563385
Epoch 297 completed out of 1000 loss: 2.809912919998169
Epoch 298 completed out of 1000 loss: 2.8097671270370483
Epoch 299 completed out of 1000 loss: 2.8096237182617188
Epoch 300 completed out of 1000 loss: 2.8094752430915833
Epoch 301 completed out of 1000 loss: 2.8093305230140686
Epoch 302 completed out of 1000 loss: 2.8091841340065002
Epoch 303 completed out of 1000 loss: 2.8090397715568542
Epoch 304 completed out of 1000 loss: 2.8088964223861694
Epoch 305 completed out of 1000 loss: 2.8087525367736816
Epoch 306 completed out of 1000 loss: 2.808608591556549
Epoch 307 completed out of 1000 loss: 2.8084657192230225
Epoch 308 completed out of 1000 loss: 2.8083282709121704
Epoch 309 completed out of 1000 loss: 2.8081894516944885
Epoch 310 completed out of 1000 loss: 2.808053493499756
Epoch 311 completed out of 1000 loss: 2.807921528816223
Epoch 312 completed out of 1000 loss: 2.8077856302261353
Epoch 313 completed out of 1000 loss: 2.807653844356537
Epoch 314 completed out of 1000 loss: 2.8075209259986877
Epoch 315 completed out of 1000 loss: 2.8073906898498535
Epoch 316 completed out of 1000 loss: 2.807259678840637
Epoch 317 completed out of 1000 loss: 2.8071311116218567
Epoch 318 completed out of 1000 loss: 2.8070061802864075
Epoch 319 completed out of 1000 loss: 2.806879222393036
Epoch 320 completed out of 1000 loss: 2.8067519068717957
Epoch 321 completed out of 1000 loss: 2.8066248297691345
Epoch 322 completed out of 1000 loss: 2.8065006136894226
Epoch 323 completed out of 1000 loss: 2.8063735961914062
Epoch 324 completed out of 1000 loss: 2.8062486052513123
Epoch 325 completed out of 1000 loss: 2.8061218857765198
Epoch 326 completed out of 1000 loss: 2.805991768836975
Epoch 327 completed out of 1000 loss: 2.805860221385956
Epoch 328 completed out of 1000 loss: 2.8057299852371216
Epoch 329 completed out of 1000 loss: 2.8055933713912964
Epoch 330 completed out of 1000 loss: 2.805449664592743
Epoch 331 completed out of 1000 loss: 2.8052823543548584
Epoch 332 completed out of 1000 loss: 2.8050766587257385
Epoch 333 completed out of 1000 loss: 2.804842472076416
Epoch 334 completed out of 1000 loss: 2.804606080055237
Epoch 335 completed out of 1000 loss: 2.8044334053993225
Epoch 336 completed out of 1000 loss: 2.804305911064148
Epoch 337 completed out of 1000 loss: 2.804154098033905
Epoch 338 completed out of 1000 loss: 2.803960680961609
Epoch 339 completed out of 1000 loss: 2.803713381290436
Epoch 340 completed out of 1000 loss: 2.803484857082367
Epoch 341 completed out of 1000 loss: 2.803310215473175
Epoch 342 completed out of 1000 loss: 2.8031870126724243
Epoch 343 completed out of 1000 loss: 2.8030226826667786
Epoch 344 completed out of 1000 loss: 2.8028225898742676
Epoch 345 completed out of 1000 loss: 2.8026267886161804
Epoch 346 completed out of 1000 loss: 2.8024336099624634
Epoch 347 completed out of 1000 loss: 2.80226868391037
Epoch 348 completed out of 1000 loss: 2.802066683769226
Epoch 349 completed out of 1000 loss: 2.801894426345825
Epoch 350 completed out of 1000 loss: 2.8017541766166687
Epoch 351 completed out of 1000 loss: 2.801634967327118
Epoch 352 completed out of 1000 loss: 2.801514744758606
Epoch 353 completed out of 1000 loss: 2.801390290260315
Epoch 354 completed out of 1000 loss: 2.8012713193893433
Epoch 355 completed out of 1000 loss: 2.8011505603790283
Epoch 356 completed out of 1000 loss: 2.8010305166244507
Epoch 357 completed out of 1000 loss: 2.800911545753479
Epoch 358 completed out of 1000 loss: 2.8007880449295044
Epoch 359 completed out of 1000 loss: 2.800661265850067
Epoch 360 completed out of 1000 loss: 2.800523340702057
Epoch 361 completed out of 1000 loss: 2.8003637194633484
Epoch 362 completed out of 1000 loss: 2.8001648783683777
Epoch 363 completed out of 1000 loss: 2.7999271750450134
Epoch 364 completed out of 1000 loss: 2.7997254133224487
Epoch 365 completed out of 1000 loss: 2.799537479877472
Epoch 366 completed out of 1000 loss: 2.79935085773468
Epoch 367 completed out of 1000 loss: 2.7991599440574646
Epoch 368 completed out of 1000 loss: 2.7990137338638306
Epoch 369 completed out of 1000 loss: 2.798889458179474
Epoch 370 completed out of 1000 loss: 2.7987587451934814
Epoch 371 completed out of 1000 loss: 2.798617362976074
Epoch 372 completed out of 1000 loss: 2.7984690070152283
Epoch 373 completed out of 1000 loss: 2.7983230352401733
Epoch 374 completed out of 1000 loss: 2.7981709241867065
Epoch 375 completed out of 1000 loss: 2.798018217086792
Epoch 376 completed out of 1000 loss: 2.797865152359009
Epoch 377 completed out of 1000 loss: 2.797716796398163
Epoch 378 completed out of 1000 loss: 2.797580122947693
Epoch 379 completed out of 1000 loss: 2.7974525094032288
Epoch 380 completed out of 1000 loss: 2.7973297834396362
Epoch 381 completed out of 1000 loss: 2.7972124814987183
Epoch 382 completed out of 1000 loss: 2.79709792137146
Epoch 383 completed out of 1000 loss: 2.7969810962677
Epoch 384 completed out of 1000 loss: 2.7968679666519165
Epoch 385 completed out of 1000 loss: 2.7967522144317627
Epoch 386 completed out of 1000 loss: 2.7966378331184387
Epoch 387 completed out of 1000 loss: 2.7965254187583923
Epoch 388 completed out of 1000 loss: 2.7964096665382385
Epoch 389 completed out of 1000 loss: 2.79629784822464
Epoch 390 completed out of 1000 loss: 2.7961791157722473
Epoch 391 completed out of 1000 loss: 2.796066641807556
Epoch 392 completed out of 1000 loss: 2.795950770378113
Epoch 393 completed out of 1000 loss: 2.795832574367523
Epoch 394 completed out of 1000 loss: 2.795715868473053
Epoch 395 completed out of 1000 loss: 2.7955960035324097
Epoch 396 completed out of 1000 loss: 2.795478940010071
Epoch 397 completed out of 1000 loss: 2.7953606843948364
Epoch 398 completed out of 1000 loss: 2.7952388525009155
Epoch 399 completed out of 1000 loss: 2.7951173186302185
Epoch 400 completed out of 1000 loss: 2.79499489068985
Epoch 401 completed out of 1000 loss: 2.7948743104934692
Epoch 402 completed out of 1000 loss: 2.7947519421577454
Epoch 403 completed out of 1000 loss: 2.7946296334266663
Epoch 404 completed out of 1000 loss: 2.794508397579193
Epoch 405 completed out of 1000 loss: 2.7943856716156006
Epoch 406 completed out of 1000 loss: 2.7942612171173096
Epoch 407 completed out of 1000 loss: 2.794138550758362
Epoch 408 completed out of 1000 loss: 2.7940163612365723
Epoch 409 completed out of 1000 loss: 2.7938926219940186
Epoch 410 completed out of 1000 loss: 2.7937658429145813
Epoch 411 completed out of 1000 loss: 2.793640673160553
Epoch 412 completed out of 1000 loss: 2.7935139536857605
Epoch 413 completed out of 1000 loss: 2.7933855652809143
Epoch 414 completed out of 1000 loss: 2.79325795173645
Epoch 415 completed out of 1000 loss: 2.7931295037269592
Epoch 416 completed out of 1000 loss: 2.7929991483688354
Epoch 417 completed out of 1000 loss: 2.792868137359619
Epoch 418 completed out of 1000 loss: 2.7927348613739014
Epoch 419 completed out of 1000 loss: 2.7925997376441956
Epoch 420 completed out of 1000 loss: 2.792464017868042
Epoch 421 completed out of 1000 loss: 2.792330026626587
Epoch 422 completed out of 1000 loss: 2.7921895384788513
Epoch 423 completed out of 1000 loss: 2.7920480370521545
Epoch 424 completed out of 1000 loss: 2.791904628276825
Epoch 425 completed out of 1000 loss: 2.7917609810829163
Epoch 426 completed out of 1000 loss: 2.7916133999824524
Epoch 427 completed out of 1000 loss: 2.7914631366729736
Epoch 428 completed out of 1000 loss: 2.7913121581077576
Epoch 429 completed out of 1000 loss: 2.7911559343338013
Epoch 430 completed out of 1000 loss: 2.7909973859786987
Epoch 431 completed out of 1000 loss: 2.790834665298462
Epoch 432 completed out of 1000 loss: 2.790669798851013
Epoch 433 completed out of 1000 loss: 2.7905009388923645
Epoch 434 completed out of 1000 loss: 2.7903294563293457
Epoch 435 completed out of 1000 loss: 2.790155053138733
Epoch 436 completed out of 1000 loss: 2.7899771332740784
Epoch 437 completed out of 1000 loss: 2.78979754447937
Epoch 438 completed out of 1000 loss: 2.7896175384521484
Epoch 439 completed out of 1000 loss: 2.789440333843231
Epoch 440 completed out of 1000 loss: 2.7892597913742065
Epoch 441 completed out of 1000 loss: 2.789084017276764
Epoch 442 completed out of 1000 loss: 2.7889082431793213
Epoch 443 completed out of 1000 loss: 2.788736343383789
Epoch 444 completed out of 1000 loss: 2.788568079471588
Epoch 445 completed out of 1000 loss: 2.788401961326599
Epoch 446 completed out of 1000 loss: 2.7882421612739563
Epoch 447 completed out of 1000 loss: 2.7880842089653015
Epoch 448 completed out of 1000 loss: 2.78792941570282
Epoch 449 completed out of 1000 loss: 2.7877766489982605
Epoch 450 completed out of 1000 loss: 2.787628650665283
Epoch 451 completed out of 1000 loss: 2.7874805331230164
Epoch 452 completed out of 1000 loss: 2.7873371839523315
Epoch 453 completed out of 1000 loss: 2.787195563316345
Epoch 454 completed out of 1000 loss: 2.7870527505874634
Epoch 455 completed out of 1000 loss: 2.7869126200675964
Epoch 456 completed out of 1000 loss: 2.7867714762687683
Epoch 457 completed out of 1000 loss: 2.7866349816322327
Epoch 458 completed out of 1000 loss: 2.786495864391327
Epoch 459 completed out of 1000 loss: 2.7863553166389465
Epoch 460 completed out of 1000 loss: 2.7862154245376587
Epoch 461 completed out of 1000 loss: 2.7860787510871887
Epoch 462 completed out of 1000 loss: 2.7859379649162292
Epoch 463 completed out of 1000 loss: 2.7857993245124817
Epoch 464 completed out of 1000 loss: 2.785659074783325
Epoch 465 completed out of 1000 loss: 2.785519301891327
Epoch 466 completed out of 1000 loss: 2.7853779792785645
Epoch 467 completed out of 1000 loss: 2.785235822200775
Epoch 468 completed out of 1000 loss: 2.7850952744483948
Epoch 469 completed out of 1000 loss: 2.7849538326263428
Epoch 470 completed out of 1000 loss: 2.7848138213157654
Epoch 471 completed out of 1000 loss: 2.7846697568893433
Epoch 472 completed out of 1000 loss: 2.7845293283462524
Epoch 473 completed out of 1000 loss: 2.7843878269195557
Epoch 474 completed out of 1000 loss: 2.784249484539032
Epoch 475 completed out of 1000 loss: 2.784108817577362
Epoch 476 completed out of 1000 loss: 2.78397274017334
Epoch 477 completed out of 1000 loss: 2.7838358879089355
Epoch 478 completed out of 1000 loss: 2.783700168132782
Epoch 479 completed out of 1000 loss: 2.7835678458213806
Epoch 480 completed out of 1000 loss: 2.7834368348121643
Epoch 481 completed out of 1000 loss: 2.7833073139190674
Epoch 482 completed out of 1000 loss: 2.7831777334213257
Epoch 483 completed out of 1000 loss: 2.783050000667572
Epoch 484 completed out of 1000 loss: 2.7829249501228333
Epoch 485 completed out of 1000 loss: 2.782802641391754
Epoch 486 completed out of 1000 loss: 2.782677710056305
Epoch 487 completed out of 1000 loss: 2.7825543880462646
Epoch 488 completed out of 1000 loss: 2.782426357269287
Epoch 489 completed out of 1000 loss: 2.7822898030281067
Epoch 490 completed out of 1000 loss: 2.7821332216262817
Epoch 491 completed out of 1000 loss: 2.7819413542747498
Epoch 492 completed out of 1000 loss: 2.7817615270614624
Epoch 493 completed out of 1000 loss: 2.7816027998924255
Epoch 494 completed out of 1000 loss: 2.7814537286758423
Epoch 495 completed out of 1000 loss: 2.7813221216201782
Epoch 496 completed out of 1000 loss: 2.7812005281448364
Epoch 497 completed out of 1000 loss: 2.781084895133972
Epoch 498 completed out of 1000 loss: 2.7809682488441467
Epoch 499 completed out of 1000 loss: 2.7808578610420227
Epoch 500 completed out of 1000 loss: 2.7807496786117554
Epoch 501 completed out of 1000 loss: 2.7806442975997925
Epoch 502 completed out of 1000 loss: 2.7805384397506714
Epoch 503 completed out of 1000 loss: 2.7804338932037354
Epoch 504 completed out of 1000 loss: 2.780331075191498
Epoch 505 completed out of 1000 loss: 2.7802306413650513
Epoch 506 completed out of 1000 loss: 2.7801337838172913
Epoch 507 completed out of 1000 loss: 2.780036687850952
Epoch 508 completed out of 1000 loss: 2.779941499233246
Epoch 509 completed out of 1000 loss: 2.779850423336029
Epoch 510 completed out of 1000 loss: 2.779757022857666
Epoch 511 completed out of 1000 loss: 2.7796652913093567
Epoch 512 completed out of 1000 loss: 2.7795751690864563
Epoch 513 completed out of 1000 loss: 2.7794876098632812
Epoch 514 completed out of 1000 loss: 2.779399335384369
Epoch 515 completed out of 1000 loss: 2.7793121337890625
Epoch 516 completed out of 1000 loss: 2.7792237401008606
Epoch 517 completed out of 1000 loss: 2.77913898229599
Epoch 518 completed out of 1000 loss: 2.7790557146072388
Epoch 519 completed out of 1000 loss: 2.7789703011512756
Epoch 520 completed out of 1000 loss: 2.778886556625366
Epoch 521 completed out of 1000 loss: 2.778804659843445
Epoch 522 completed out of 1000 loss: 2.7787243723869324
Epoch 523 completed out of 1000 loss: 2.7786408066749573
Epoch 524 completed out of 1000 loss: 2.7785603404045105
Epoch 525 completed out of 1000 loss: 2.778480291366577
Epoch 526 completed out of 1000 loss: 2.7783984541893005
Epoch 527 completed out of 1000 loss: 2.778320789337158
Epoch 528 completed out of 1000 loss: 2.778244733810425
Epoch 529 completed out of 1000 loss: 2.7781664729118347
Epoch 530 completed out of 1000 loss: 2.778087615966797
Epoch 531 completed out of 1000 loss: 2.7780150771141052
Epoch 532 completed out of 1000 loss: 2.7779380679130554
Epoch 533 completed out of 1000 loss: 2.7778618335723877
Epoch 534 completed out of 1000 loss: 2.777786433696747
Epoch 535 completed out of 1000 loss: 2.777715563774109
Epoch 536 completed out of 1000 loss: 2.777641177177429
Epoch 537 completed out of 1000 loss: 2.7775685787200928
Epoch 538 completed out of 1000 loss: 2.7774953842163086
Epoch 539 completed out of 1000 loss: 2.777425765991211
Epoch 540 completed out of 1000 loss: 2.777354419231415
Epoch 541 completed out of 1000 loss: 2.7772828936576843
Epoch 542 completed out of 1000 loss: 2.7772119641304016
Epoch 543 completed out of 1000 loss: 2.777145743370056
Epoch 544 completed out of 1000 loss: 2.777075946331024
Epoch 545 completed out of 1000 loss: 2.777008831501007
Epoch 546 completed out of 1000 loss: 2.7769405245780945
Epoch 547 completed out of 1000 loss: 2.776872217655182
Epoch 548 completed out of 1000 loss: 2.776808440685272
Epoch 549 completed out of 1000 loss: 2.7767415642738342
Epoch 550 completed out of 1000 loss: 2.776675522327423
Epoch 551 completed out of 1000 loss: 2.7766103744506836
Epoch 552 completed out of 1000 loss: 2.7765466570854187
Epoch 553 completed out of 1000 loss: 2.7764813899993896
Epoch 554 completed out of 1000 loss: 2.776418089866638
Epoch 555 completed out of 1000 loss: 2.7763574719429016
Epoch 556 completed out of 1000 loss: 2.7762932777404785
Epoch 557 completed out of 1000 loss: 2.776230275630951
Epoch 558 completed out of 1000 loss: 2.776167571544647
Epoch 559 completed out of 1000 loss: 2.7761070728302
Epoch 560 completed out of 1000 loss: 2.7760459780693054
Epoch 561 completed out of 1000 loss: 2.775982916355133
Epoch 562 completed out of 1000 loss: 2.7759239077568054
Epoch 563 completed out of 1000 loss: 2.77586430311203
Epoch 564 completed out of 1000 loss: 2.7758034467697144
Epoch 565 completed out of 1000 loss: 2.7757447361946106
Epoch 566 completed out of 1000 loss: 2.775683343410492
Epoch 567 completed out of 1000 loss: 2.775622248649597
Epoch 568 completed out of 1000 loss: 2.775563359260559
Epoch 569 completed out of 1000 loss: 2.775502383708954
Epoch 570 completed out of 1000 loss: 2.775443732738495
Epoch 571 completed out of 1000 loss: 2.7753833532333374
Epoch 572 completed out of 1000 loss: 2.7753254175186157
Epoch 573 completed out of 1000 loss: 2.7752645611763
Epoch 574 completed out of 1000 loss: 2.775205433368683
Epoch 575 completed out of 1000 loss: 2.775149643421173
Epoch 576 completed out of 1000 loss: 2.7750890254974365
Epoch 577 completed out of 1000 loss: 2.775033414363861
Epoch 578 completed out of 1000 loss: 2.7749773263931274
Epoch 579 completed out of 1000 loss: 2.774920642375946
Epoch 580 completed out of 1000 loss: 2.774862825870514
Epoch 581 completed out of 1000 loss: 2.774808645248413
Epoch 582 completed out of 1000 loss: 2.774751365184784
Epoch 583 completed out of 1000 loss: 2.7746957540512085
Epoch 584 completed out of 1000 loss: 2.7746416330337524
Epoch 585 completed out of 1000 loss: 2.7745854258537292
Epoch 586 completed out of 1000 loss: 2.77453076839447
Epoch 587 completed out of 1000 loss: 2.7744771242141724
Epoch 588 completed out of 1000 loss: 2.7744222283363342
Epoch 589 completed out of 1000 loss: 2.774367392063141
Epoch 590 completed out of 1000 loss: 2.774315059185028
Epoch 591 completed out of 1000 loss: 2.7742594480514526
Epoch 592 completed out of 1000 loss: 2.7742074131965637
Epoch 593 completed out of 1000 loss: 2.774154305458069
Epoch 594 completed out of 1000 loss: 2.7741013169288635
Epoch 595 completed out of 1000 loss: 2.7740485668182373
Epoch 596 completed out of 1000 loss: 2.7739967107772827
Epoch 597 completed out of 1000 loss: 2.773943781852722
Epoch 598 completed out of 1000 loss: 2.773892879486084
Epoch 599 completed out of 1000 loss: 2.773839592933655
Epoch 600 completed out of 1000 loss: 2.7737887501716614
Epoch 601 completed out of 1000 loss: 2.77373731136322
Epoch 602 completed out of 1000 loss: 2.7736854553222656
Epoch 603 completed out of 1000 loss: 2.773633897304535
Epoch 604 completed out of 1000 loss: 2.773582637310028
Epoch 605 completed out of 1000 loss: 2.7735305428504944
Epoch 606 completed out of 1000 loss: 2.77347993850708
Epoch 607 completed out of 1000 loss: 2.773430049419403
Epoch 608 completed out of 1000 loss: 2.7733784914016724
Epoch 609 completed out of 1000 loss: 2.7733271718025208
Epoch 610 completed out of 1000 loss: 2.7732757925987244
Epoch 611 completed out of 1000 loss: 2.773225426673889
Epoch 612 completed out of 1000 loss: 2.773174226284027
Epoch 613 completed out of 1000 loss: 2.7731232047080994
Epoch 614 completed out of 1000 loss: 2.773072123527527
Epoch 615 completed out of 1000 loss: 2.7730223536491394
Epoch 616 completed out of 1000 loss: 2.7729716300964355
Epoch 617 completed out of 1000 loss: 2.7729220390319824
Epoch 618 completed out of 1000 loss: 2.772872269153595
Epoch 619 completed out of 1000 loss: 2.772820472717285
Epoch 620 completed out of 1000 loss: 2.772771120071411
Epoch 621 completed out of 1000 loss: 2.7727214694023132
Epoch 622 completed out of 1000 loss: 2.772670030593872
Epoch 623 completed out of 1000 loss: 2.7726202607154846
Epoch 624 completed out of 1000 loss: 2.7725696563720703
Epoch 625 completed out of 1000 loss: 2.772519588470459
Epoch 626 completed out of 1000 loss: 2.7724679708480835
Epoch 627 completed out of 1000 loss: 2.772418797016144
Epoch 628 completed out of 1000 loss: 2.772368609905243
Epoch 629 completed out of 1000 loss: 2.77231901884079
Epoch 630 completed out of 1000 loss: 2.772267997264862
Epoch 631 completed out of 1000 loss: 2.7722180485725403
Epoch 632 completed out of 1000 loss: 2.7721680402755737
Epoch 633 completed out of 1000 loss: 2.7721179127693176
Epoch 634 completed out of 1000 loss: 2.7720656991004944
Epoch 635 completed out of 1000 loss: 2.772016704082489
Epoch 636 completed out of 1000 loss: 2.771965980529785
Epoch 637 completed out of 1000 loss: 2.7719154357910156
Epoch 638 completed out of 1000 loss: 2.771864652633667
Epoch 639 completed out of 1000 loss: 2.771814465522766
Epoch 640 completed out of 1000 loss: 2.771763324737549
Epoch 641 completed out of 1000 loss: 2.7717129588127136
Epoch 642 completed out of 1000 loss: 2.7716618180274963
Epoch 643 completed out of 1000 loss: 2.7716105580329895
Epoch 644 completed out of 1000 loss: 2.771558403968811
Epoch 645 completed out of 1000 loss: 2.7715075612068176
Epoch 646 completed out of 1000 loss: 2.7714588046073914
Epoch 647 completed out of 1000 loss: 2.7714074850082397
Epoch 648 completed out of 1000 loss: 2.7713557481765747
Epoch 649 completed out of 1000 loss: 2.7713024616241455
Epoch 650 completed out of 1000 loss: 2.7712502479553223
Epoch 651 completed out of 1000 loss: 2.7711991667747498
Epoch 652 completed out of 1000 loss: 2.771147668361664
Epoch 653 completed out of 1000 loss: 2.7710960507392883
Epoch 654 completed out of 1000 loss: 2.7710456252098083
Epoch 655 completed out of 1000 loss: 2.7709922790527344
Epoch 656 completed out of 1000 loss: 2.7709380984306335
Epoch 657 completed out of 1000 loss: 2.7708860635757446
Epoch 658 completed out of 1000 loss: 2.770835041999817
Epoch 659 completed out of 1000 loss: 2.770781934261322
Epoch 660 completed out of 1000 loss: 2.7707284092903137
Epoch 661 completed out of 1000 loss: 2.770676553249359
Epoch 662 completed out of 1000 loss: 2.7706227898597717
Epoch 663 completed out of 1000 loss: 2.7705699801445007
Epoch 664 completed out of 1000 loss: 2.770515739917755
Epoch 665 completed out of 1000 loss: 2.7704596519470215
Epoch 666 completed out of 1000 loss: 2.770404875278473
Epoch 667 completed out of 1000 loss: 2.7703478932380676
Epoch 668 completed out of 1000 loss: 2.770293653011322
Epoch 669 completed out of 1000 loss: 2.7702366709709167
Epoch 670 completed out of 1000 loss: 2.770182251930237
Epoch 671 completed out of 1000 loss: 2.770124137401581
Epoch 672 completed out of 1000 loss: 2.7700687646865845
Epoch 673 completed out of 1000 loss: 2.7700114250183105
Epoch 674 completed out of 1000 loss: 2.7699554562568665
Epoch 675 completed out of 1000 loss: 2.7699021100997925
Epoch 676 completed out of 1000 loss: 2.769848346710205
Epoch 677 completed out of 1000 loss: 2.769791901111603
Epoch 678 completed out of 1000 loss: 2.769737660884857
Epoch 679 completed out of 1000 loss: 2.7696810960769653
Epoch 680 completed out of 1000 loss: 2.7696253657341003
Epoch 681 completed out of 1000 loss: 2.769569516181946
Epoch 682 completed out of 1000 loss: 2.7695142030715942
Epoch 683 completed out of 1000 loss: 2.769457757472992
Epoch 684 completed out of 1000 loss: 2.7694036960601807
Epoch 685 completed out of 1000 loss: 2.7693458199501038
Epoch 686 completed out of 1000 loss: 2.769291043281555
Epoch 687 completed out of 1000 loss: 2.7692336440086365
Epoch 688 completed out of 1000 loss: 2.7691773176193237
Epoch 689 completed out of 1000 loss: 2.7691216468811035
Epoch 690 completed out of 1000 loss: 2.769063353538513
Epoch 691 completed out of 1000 loss: 2.769008219242096
Epoch 692 completed out of 1000 loss: 2.768950045108795
Epoch 693 completed out of 1000 loss: 2.768890917301178
Epoch 694 completed out of 1000 loss: 2.768833339214325
Epoch 695 completed out of 1000 loss: 2.7687753438949585
Epoch 696 completed out of 1000 loss: 2.7687183618545532
Epoch 697 completed out of 1000 loss: 2.768659770488739
Epoch 698 completed out of 1000 loss: 2.7685999870300293
Epoch 699 completed out of 1000 loss: 2.768541395664215
Epoch 700 completed out of 1000 loss: 2.768480122089386
Epoch 701 completed out of 1000 loss: 2.7684205174446106
Epoch 702 completed out of 1000 loss: 2.7683597803115845
Epoch 703 completed out of 1000 loss: 2.7682997584342957
Epoch 704 completed out of 1000 loss: 2.7682382464408875
Epoch 705 completed out of 1000 loss: 2.768179953098297
Epoch 706 completed out of 1000 loss: 2.7681156396865845
Epoch 707 completed out of 1000 loss: 2.7680550813674927
Epoch 708 completed out of 1000 loss: 2.7679949402809143
Epoch 709 completed out of 1000 loss: 2.7679327726364136
Epoch 710 completed out of 1000 loss: 2.767870783805847
Epoch 711 completed out of 1000 loss: 2.767810046672821
Epoch 712 completed out of 1000 loss: 2.767748475074768
Epoch 713 completed out of 1000 loss: 2.7676879167556763
Epoch 714 completed out of 1000 loss: 2.7676243782043457
Epoch 715 completed out of 1000 loss: 2.7675641775131226
Epoch 716 completed out of 1000 loss: 2.767500102519989
Epoch 717 completed out of 1000 loss: 2.7674371004104614
Epoch 718 completed out of 1000 loss: 2.7673749327659607
Epoch 719 completed out of 1000 loss: 2.7673102021217346
Epoch 720 completed out of 1000 loss: 2.7672460079193115
Epoch 721 completed out of 1000 loss: 2.767183482646942
Epoch 722 completed out of 1000 loss: 2.7671183943748474
Epoch 723 completed out of 1000 loss: 2.7670536637306213
Epoch 724 completed out of 1000 loss: 2.7669896483421326
Epoch 725 completed out of 1000 loss: 2.766924262046814
Epoch 726 completed out of 1000 loss: 2.7668577432632446
Epoch 727 completed out of 1000 loss: 2.7667912244796753
Epoch 728 completed out of 1000 loss: 2.7667232751846313
Epoch 729 completed out of 1000 loss: 2.7666563391685486
Epoch 730 completed out of 1000 loss: 2.7665878534317017
Epoch 731 completed out of 1000 loss: 2.7665191888809204
Epoch 732 completed out of 1000 loss: 2.766451418399811
Epoch 733 completed out of 1000 loss: 2.766380250453949
Epoch 734 completed out of 1000 loss: 2.7663114070892334
Epoch 735 completed out of 1000 loss: 2.766241729259491
Epoch 736 completed out of 1000 loss: 2.7661715745925903
Epoch 737 completed out of 1000 loss: 2.7661025524139404
Epoch 738 completed out of 1000 loss: 2.766030251979828
Epoch 739 completed out of 1000 loss: 2.7659600973129272
Epoch 740 completed out of 1000 loss: 2.765892744064331
Epoch 741 completed out of 1000 loss: 2.765818774700165
Epoch 742 completed out of 1000 loss: 2.765748679637909
Epoch 743 completed out of 1000 loss: 2.7656801342964172
Epoch 744 completed out of 1000 loss: 2.7656063437461853
Epoch 745 completed out of 1000 loss: 2.7655364871025085
Epoch 746 completed out of 1000 loss: 2.7654643654823303
Epoch 747 completed out of 1000 loss: 2.7653931975364685
Epoch 748 completed out of 1000 loss: 2.7653209567070007
Epoch 749 completed out of 1000 loss: 2.7652480602264404
Epoch 750 completed out of 1000 loss: 2.765175461769104
Epoch 751 completed out of 1000 loss: 2.765102744102478
Epoch 752 completed out of 1000 loss: 2.765028417110443
Epoch 753 completed out of 1000 loss: 2.764953374862671
Epoch 754 completed out of 1000 loss: 2.764877676963806
Epoch 755 completed out of 1000 loss: 2.764802575111389
Epoch 756 completed out of 1000 loss: 2.764725923538208
Epoch 757 completed out of 1000 loss: 2.7646488547325134
Epoch 758 completed out of 1000 loss: 2.7645720839500427
Epoch 759 completed out of 1000 loss: 2.7644920349121094
Epoch 760 completed out of 1000 loss: 2.7644113302230835
Epoch 761 completed out of 1000 loss: 2.7643293738365173
Epoch 762 completed out of 1000 loss: 2.764243185520172
Epoch 763 completed out of 1000 loss: 2.7641594409942627
Epoch 764 completed out of 1000 loss: 2.7640733122825623
Epoch 765 completed out of 1000 loss: 2.7639829516410828
Epoch 766 completed out of 1000 loss: 2.763892352581024
Epoch 767 completed out of 1000 loss: 2.7637996077537537
Epoch 768 completed out of 1000 loss: 2.763703167438507
Epoch 769 completed out of 1000 loss: 2.7636048197746277
Epoch 770 completed out of 1000 loss: 2.763500988483429
Epoch 771 completed out of 1000 loss: 2.7633941769599915
Epoch 772 completed out of 1000 loss: 2.7632834911346436
Epoch 773 completed out of 1000 loss: 2.763165831565857
Epoch 774 completed out of 1000 loss: 2.763044059276581
Epoch 775 completed out of 1000 loss: 2.762913465499878
Epoch 776 completed out of 1000 loss: 2.762776255607605
Epoch 777 completed out of 1000 loss: 2.762631416320801
Epoch 778 completed out of 1000 loss: 2.7624740600585938
Epoch 779 completed out of 1000 loss: 2.762305200099945
Epoch 780 completed out of 1000 loss: 2.7621224522590637
Epoch 781 completed out of 1000 loss: 2.7619211077690125
Epoch 782 completed out of 1000 loss: 2.7617005109786987
Epoch 783 completed out of 1000 loss: 2.761456251144409
Epoch 784 completed out of 1000 loss: 2.761184871196747
Epoch 785 completed out of 1000 loss: 2.7608800530433655
Epoch 786 completed out of 1000 loss: 2.7605336904525757
Epoch 787 completed out of 1000 loss: 2.760142743587494
Epoch 788 completed out of 1000 loss: 2.7596988677978516
Epoch 789 completed out of 1000 loss: 2.7591904997825623
Epoch 790 completed out of 1000 loss: 2.758613646030426
Epoch 791 completed out of 1000 loss: 2.757961690425873
Epoch 792 completed out of 1000 loss: 2.7572312355041504
Epoch 793 completed out of 1000 loss: 2.7564300298690796
Epoch 794 completed out of 1000 loss: 2.7555651664733887
Epoch 795 completed out of 1000 loss: 2.7546643018722534
Epoch 796 completed out of 1000 loss: 2.7537620663642883
Epoch 797 completed out of 1000 loss: 2.7528958916664124
Epoch 798 completed out of 1000 loss: 2.752104699611664
Epoch 799 completed out of 1000 loss: 2.751415252685547
Epoch 800 completed out of 1000 loss: 2.7508426308631897
Epoch 801 completed out of 1000 loss: 2.7503785490989685
Epoch 802 completed out of 1000 loss: 2.7500118017196655
Epoch 803 completed out of 1000 loss: 2.749715566635132
Epoch 804 completed out of 1000 loss: 2.7494714856147766
Epoch 805 completed out of 1000 loss: 2.7492583990097046
Epoch 806 completed out of 1000 loss: 2.749069094657898
Epoch 807 completed out of 1000 loss: 2.7488948106765747
Epoch 808 completed out of 1000 loss: 2.7487279176712036
Epoch 809 completed out of 1000 loss: 2.748568117618561
Epoch 810 completed out of 1000 loss: 2.748414695262909
Epoch 811 completed out of 1000 loss: 2.7482664585113525
Epoch 812 completed out of 1000 loss: 2.7481213212013245
Epoch 813 completed out of 1000 loss: 2.7479811906814575
Epoch 814 completed out of 1000 loss: 2.747841238975525
Epoch 815 completed out of 1000 loss: 2.7477093935012817
Epoch 816 completed out of 1000 loss: 2.7475791573524475
Epoch 817 completed out of 1000 loss: 2.7474502325057983
Epoch 818 completed out of 1000 loss: 2.747321307659149
Epoch 819 completed out of 1000 loss: 2.7471989393234253
Epoch 820 completed out of 1000 loss: 2.7470812797546387
Epoch 821 completed out of 1000 loss: 2.746963381767273
Epoch 822 completed out of 1000 loss: 2.746850907802582
Epoch 823 completed out of 1000 loss: 2.746748626232147
Epoch 824 completed out of 1000 loss: 2.7466429471969604
Epoch 825 completed out of 1000 loss: 2.7465405464172363
Epoch 826 completed out of 1000 loss: 2.7464430928230286
Epoch 827 completed out of 1000 loss: 2.7463483214378357
Epoch 828 completed out of 1000 loss: 2.746256470680237
Epoch 829 completed out of 1000 loss: 2.746166706085205
Epoch 830 completed out of 1000 loss: 2.746080458164215
Epoch 831 completed out of 1000 loss: 2.745997905731201
Epoch 832 completed out of 1000 loss: 2.745917499065399
Epoch 833 completed out of 1000 loss: 2.745838224887848
Epoch 834 completed out of 1000 loss: 2.745762825012207
Epoch 835 completed out of 1000 loss: 2.7456888556480408
Epoch 836 completed out of 1000 loss: 2.745615303516388
Epoch 837 completed out of 1000 loss: 2.7455447912216187
Epoch 838 completed out of 1000 loss: 2.745478093624115
Epoch 839 completed out of 1000 loss: 2.7454090118408203
Epoch 840 completed out of 1000 loss: 2.745343506336212
Epoch 841 completed out of 1000 loss: 2.7452786564826965
Epoch 842 completed out of 1000 loss: 2.745214283466339
Epoch 843 completed out of 1000 loss: 2.7451505064964294
Epoch 844 completed out of 1000 loss: 2.7450905442237854
Epoch 845 completed out of 1000 loss: 2.745029091835022
Epoch 846 completed out of 1000 loss: 2.744969606399536
Epoch 847 completed out of 1000 loss: 2.7449106574058533
Epoch 848 completed out of 1000 loss: 2.744853138923645
Epoch 849 completed out of 1000 loss: 2.744796931743622
Epoch 850 completed out of 1000 loss: 2.744738519191742
Epoch 851 completed out of 1000 loss: 2.744682550430298
Epoch 852 completed out of 1000 loss: 2.744627833366394
Epoch 853 completed out of 1000 loss: 2.7445732951164246
Epoch 854 completed out of 1000 loss: 2.7445173859596252
Epoch 855 completed out of 1000 loss: 2.744463860988617
Epoch 856 completed out of 1000 loss: 2.744410455226898
Epoch 857 completed out of 1000 loss: 2.744357705116272
Epoch 858 completed out of 1000 loss: 2.744305908679962
Epoch 859 completed out of 1000 loss: 2.7442525029182434
Epoch 860 completed out of 1000 loss: 2.744201362133026
Epoch 861 completed out of 1000 loss: 2.7441473603248596
Epoch 862 completed out of 1000 loss: 2.7440947890281677
Epoch 863 completed out of 1000 loss: 2.7440417408943176
Epoch 864 completed out of 1000 loss: 2.7439870834350586
Epoch 865 completed out of 1000 loss: 2.74393630027771
Epoch 866 completed out of 1000 loss: 2.7438845038414
Epoch 867 completed out of 1000 loss: 2.7438344955444336
Epoch 868 completed out of 1000 loss: 2.743791103363037
Epoch 869 completed out of 1000 loss: 2.7437429428100586
Epoch 870 completed out of 1000 loss: 2.7436946630477905
Epoch 871 completed out of 1000 loss: 2.743648052215576
Epoch 872 completed out of 1000 loss: 2.7436022758483887
Epoch 873 completed out of 1000 loss: 2.7435561418533325
Epoch 874 completed out of 1000 loss: 2.743511199951172
Epoch 875 completed out of 1000 loss: 2.7434659600257874
Epoch 876 completed out of 1000 loss: 2.743418276309967
Epoch 877 completed out of 1000 loss: 2.743373930454254
Epoch 878 completed out of 1000 loss: 2.7433308362960815
Epoch 879 completed out of 1000 loss: 2.74328476190567
Epoch 880 completed out of 1000 loss: 2.7432425022125244
Epoch 881 completed out of 1000 loss: 2.743199586868286
Epoch 882 completed out of 1000 loss: 2.74315744638443
Epoch 883 completed out of 1000 loss: 2.7431140542030334
Epoch 884 completed out of 1000 loss: 2.7430712580680847
Epoch 885 completed out of 1000 loss: 2.7430297136306763
Epoch 886 completed out of 1000 loss: 2.7429875135421753
Epoch 887 completed out of 1000 loss: 2.7429452538490295
Epoch 888 completed out of 1000 loss: 2.742904543876648
Epoch 889 completed out of 1000 loss: 2.742863178253174
Epoch 890 completed out of 1000 loss: 2.7428221106529236
Epoch 891 completed out of 1000 loss: 2.74278062582016
Epoch 892 completed out of 1000 loss: 2.7427412271499634
Epoch 893 completed out of 1000 loss: 2.742701470851898
Epoch 894 completed out of 1000 loss: 2.7426605224609375
Epoch 895 completed out of 1000 loss: 2.742620527744293
Epoch 896 completed out of 1000 loss: 2.742581307888031
Epoch 897 completed out of 1000 loss: 2.7425405979156494
Epoch 898 completed out of 1000 loss: 2.7425015568733215
Epoch 899 completed out of 1000 loss: 2.7424615621566772
Epoch 900 completed out of 1000 loss: 2.742422938346863
Epoch 901 completed out of 1000 loss: 2.7423861622810364
Epoch 902 completed out of 1000 loss: 2.7423458099365234
Epoch 903 completed out of 1000 loss: 2.7423064708709717
Epoch 904 completed out of 1000 loss: 2.742268979549408
Epoch 905 completed out of 1000 loss: 2.74222993850708
Epoch 906 completed out of 1000 loss: 2.7421929836273193
Epoch 907 completed out of 1000 loss: 2.742154896259308
Epoch 908 completed out of 1000 loss: 2.742120385169983
Epoch 909 completed out of 1000 loss: 2.7420802116394043
Epoch 910 completed out of 1000 loss: 2.7420429587364197
Epoch 911 completed out of 1000 loss: 2.7420063614845276
Epoch 912 completed out of 1000 loss: 2.741967260837555
Epoch 913 completed out of 1000 loss: 2.7419317960739136
Epoch 914 completed out of 1000 loss: 2.741893768310547
Epoch 915 completed out of 1000 loss: 2.7418601512908936
Epoch 916 completed out of 1000 loss: 2.741821825504303
Epoch 917 completed out of 1000 loss: 2.7417837381362915
Epoch 918 completed out of 1000 loss: 2.74174964427948
Epoch 919 completed out of 1000 loss: 2.7417120337486267
Epoch 920 completed out of 1000 loss: 2.741676151752472
Epoch 921 completed out of 1000 loss: 2.741639494895935
Epoch 922 completed out of 1000 loss: 2.7416046261787415
Epoch 923 completed out of 1000 loss: 2.741567373275757
Epoch 924 completed out of 1000 loss: 2.741532802581787
Epoch 925 completed out of 1000 loss: 2.741495907306671
Epoch 926 completed out of 1000 loss: 2.7414578795433044
Epoch 927 completed out of 1000 loss: 2.741421163082123
Epoch 928 completed out of 1000 loss: 2.7413859963417053
Epoch 929 completed out of 1000 loss: 2.7413492798805237
Epoch 930 completed out of 1000 loss: 2.7413124442100525
Epoch 931 completed out of 1000 loss: 2.7412760853767395
Epoch 932 completed out of 1000 loss: 2.741239905357361
Epoch 933 completed out of 1000 loss: 2.741203010082245
Epoch 934 completed out of 1000 loss: 2.741167187690735
Epoch 935 completed out of 1000 loss: 2.741131365299225
Epoch 936 completed out of 1000 loss: 2.741095542907715
Epoch 937 completed out of 1000 loss: 2.7410584092140198
Epoch 938 completed out of 1000 loss: 2.7410229444503784
Epoch 939 completed out of 1000 loss: 2.740985155105591
Epoch 940 completed out of 1000 loss: 2.740947961807251
Epoch 941 completed out of 1000 loss: 2.7409133911132812
Epoch 942 completed out of 1000 loss: 2.7408782243728638
Epoch 943 completed out of 1000 loss: 2.7408424615859985
Epoch 944 completed out of 1000 loss: 2.7408047318458557
Epoch 945 completed out of 1000 loss: 2.7407681941986084
Epoch 946 completed out of 1000 loss: 2.7407339811325073
Epoch 947 completed out of 1000 loss: 2.740695297718048
Epoch 948 completed out of 1000 loss: 2.7406599521636963
Epoch 949 completed out of 1000 loss: 2.7406240701675415
Epoch 950 completed out of 1000 loss: 2.7405897974967957
Epoch 951 completed out of 1000 loss: 2.740553319454193
Epoch 952 completed out of 1000 loss: 2.740517556667328
Epoch 953 completed out of 1000 loss: 2.7404826283454895
Epoch 954 completed out of 1000 loss: 2.7404488921165466
Epoch 955 completed out of 1000 loss: 2.7404130697250366
Epoch 956 completed out of 1000 loss: 2.7403769493103027
Epoch 957 completed out of 1000 loss: 2.7403444051742554
Epoch 958 completed out of 1000 loss: 2.740309178829193
Epoch 959 completed out of 1000 loss: 2.74027681350708
Epoch 960 completed out of 1000 loss: 2.7402420043945312
Epoch 961 completed out of 1000 loss: 2.7402082085609436
Epoch 962 completed out of 1000 loss: 2.740174889564514
Epoch 963 completed out of 1000 loss: 2.740141272544861
Epoch 964 completed out of 1000 loss: 2.7401066422462463
Epoch 965 completed out of 1000 loss: 2.7400736212730408
Epoch 966 completed out of 1000 loss: 2.7400391697883606
Epoch 967 completed out of 1000 loss: 2.7400054931640625
Epoch 968 completed out of 1000 loss: 2.7399730682373047
Epoch 969 completed out of 1000 loss: 2.7399399876594543
Epoch 970 completed out of 1000 loss: 2.7399057149887085
Epoch 971 completed out of 1000 loss: 2.739872455596924
Epoch 972 completed out of 1000 loss: 2.739839732646942
Epoch 973 completed out of 1000 loss: 2.7398065328598022
Epoch 974 completed out of 1000 loss: 2.739770829677582
Epoch 975 completed out of 1000 loss: 2.7397390604019165
Epoch 976 completed out of 1000 loss: 2.739706575870514
Epoch 977 completed out of 1000 loss: 2.739673614501953
Epoch 978 completed out of 1000 loss: 2.739642381668091
Epoch 979 completed out of 1000 loss: 2.7396100759506226
Epoch 980 completed out of 1000 loss: 2.739576041698456
Epoch 981 completed out of 1000 loss: 2.739543080329895
Epoch 982 completed out of 1000 loss: 2.739511489868164
Epoch 983 completed out of 1000 loss: 2.7394787669181824
Epoch 984 completed out of 1000 loss: 2.739446699619293
Epoch 985 completed out of 1000 loss: 2.739414930343628
Epoch 986 completed out of 1000 loss: 2.7393828630447388
Epoch 987 completed out of 1000 loss: 2.7393504977226257
Epoch 988 completed out of 1000 loss: 2.739319324493408
Epoch 989 completed out of 1000 loss: 2.7392889857292175
Epoch 990 completed out of 1000 loss: 2.7392572164535522
Epoch 991 completed out of 1000 loss: 2.739225447177887
Epoch 992 completed out of 1000 loss: 2.7391969561576843
Epoch 993 completed out of 1000 loss: 2.7391658425331116
Epoch 994 completed out of 1000 loss: 2.739135980606079
Epoch 995 completed out of 1000 loss: 2.7391051650047302
Epoch 996 completed out of 1000 loss: 2.7390735745429993
Epoch 997 completed out of 1000 loss: 2.739043653011322
Epoch 998 completed out of 1000 loss: 2.739014208316803
Epoch 999 completed out of 1000 loss: 2.738983154296875
Tensor("Softmax_1:0", shape=(?, 2), dtype=float32)
[[0.25691307 0.743087  ]
 [0.7167285  0.28327143]
 [0.17122412 0.82877594]
 ...
 [0.92609924 0.07390072]
 [0.92609924 0.07390072]
 [0.6976961  0.30230388]]
False
accuracy on train: 0.57882416
Accuracy on test : 0.56207913
[[1. 0.]
 [1. 0.]
 [0. 1.]
 ...
 [0. 1.]
 [0. 1.]
 [1. 0.]]
[1 0 1 ... 0 0 0]
In [115]:
cm8 = confusion_matrix(df_yb,predy2)
plt.figure(figsize = (10,7))
cm8df = pd.DataFrame(cm8, index = ["1",'0'], columns = ["1",'0'])
seaborn.set(font_scale=1.4)#for label size
seaborn.heatmap(cm8df, annot=True,annot_kws={"size": 16})# font size

# plt.show()
# ig, ax = plt.subplots()
# cm = confusion_matrix(df_yb, predy2)

# im = ax.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
# ax.figure.colorbar(im, ax=ax)

# ax.set(yticks=[-0.5, 1.5], 
#        xticks=[0, 1], 
#        yticklabels=[1,0], 
#        xticklabels=[1,0])
# # ax.yaxis.set_major_locator(ticker.IndexLocater(base=1, offset=0.5))
# ax.yaxis.set_major_locator(mat.ticker.IndexLocator(base=1, offset=0.5))
# seaborn.heatmap(cm8df, annot=True)
Out[115]:
<matplotlib.axes._subplots.AxesSubplot at 0x20f0c287b48>
In [116]:
fpr, tpr, _ = roc_curve(predy2, df_yb, drop_intermediate=False)
plt.plot(fpr, tpr, color='red')
plt.xlabel('fpr')
plt.ylabel('tpr')
plt.title('ROC curve fr Tensorflow 2')
plt.show()
In [117]:
losslist3,predy3 = train_neural_network2(x, dfabde, df_yabdeten, dfc, df_ycten)
predy3=np.argmax(predy3,axis=1)
Epoch 0 completed out of 1000 loss: 2.9244229793548584
Epoch 1 completed out of 1000 loss: 2.919194221496582
Epoch 2 completed out of 1000 loss: 2.914746105670929
Epoch 3 completed out of 1000 loss: 2.9109460711479187
Epoch 4 completed out of 1000 loss: 2.9076281785964966
Epoch 5 completed out of 1000 loss: 2.9046390056610107
Epoch 6 completed out of 1000 loss: 2.901992619037628
Epoch 7 completed out of 1000 loss: 2.8996741771698
Epoch 8 completed out of 1000 loss: 2.8975101113319397
Epoch 9 completed out of 1000 loss: 2.8954278230667114
Epoch 10 completed out of 1000 loss: 2.8934370279312134
Epoch 11 completed out of 1000 loss: 2.8915507197380066
Epoch 12 completed out of 1000 loss: 2.889755427837372
Epoch 13 completed out of 1000 loss: 2.888034224510193
Epoch 14 completed out of 1000 loss: 2.8863860368728638
Epoch 15 completed out of 1000 loss: 2.884793519973755
Epoch 16 completed out of 1000 loss: 2.883127987384796
Epoch 17 completed out of 1000 loss: 2.881484627723694
Epoch 18 completed out of 1000 loss: 2.8801158666610718
Epoch 19 completed out of 1000 loss: 2.878804862499237
Epoch 20 completed out of 1000 loss: 2.8775076866149902
Epoch 21 completed out of 1000 loss: 2.8761950731277466
Epoch 22 completed out of 1000 loss: 2.874862849712372
Epoch 23 completed out of 1000 loss: 2.8735883235931396
Epoch 24 completed out of 1000 loss: 2.8723955154418945
Epoch 25 completed out of 1000 loss: 2.8712244033813477
Epoch 26 completed out of 1000 loss: 2.870159089565277
Epoch 27 completed out of 1000 loss: 2.869152247905731
Epoch 28 completed out of 1000 loss: 2.868194043636322
Epoch 29 completed out of 1000 loss: 2.8672783970832825
Epoch 30 completed out of 1000 loss: 2.8663934469223022
Epoch 31 completed out of 1000 loss: 2.86553555727005
Epoch 32 completed out of 1000 loss: 2.8647085428237915
Epoch 33 completed out of 1000 loss: 2.863907814025879
Epoch 34 completed out of 1000 loss: 2.8631184697151184
Epoch 35 completed out of 1000 loss: 2.86233651638031
Epoch 36 completed out of 1000 loss: 2.861572802066803
Epoch 37 completed out of 1000 loss: 2.860819399356842
Epoch 38 completed out of 1000 loss: 2.8600786924362183
Epoch 39 completed out of 1000 loss: 2.859370470046997
Epoch 40 completed out of 1000 loss: 2.85870897769928
Epoch 41 completed out of 1000 loss: 2.858071804046631
Epoch 42 completed out of 1000 loss: 2.8574588894844055
Epoch 43 completed out of 1000 loss: 2.856861650943756
Epoch 44 completed out of 1000 loss: 2.856281280517578
Epoch 45 completed out of 1000 loss: 2.8557138442993164
Epoch 46 completed out of 1000 loss: 2.855157971382141
Epoch 47 completed out of 1000 loss: 2.854611396789551
Epoch 48 completed out of 1000 loss: 2.8540656566619873
Epoch 49 completed out of 1000 loss: 2.8535228967666626
Epoch 50 completed out of 1000 loss: 2.8529734015464783
Epoch 51 completed out of 1000 loss: 2.852437138557434
Epoch 52 completed out of 1000 loss: 2.851910948753357
Epoch 53 completed out of 1000 loss: 2.851390242576599
Epoch 54 completed out of 1000 loss: 2.85087651014328
Epoch 55 completed out of 1000 loss: 2.850373685359955
Epoch 56 completed out of 1000 loss: 2.8498820662498474
Epoch 57 completed out of 1000 loss: 2.849392831325531
Epoch 58 completed out of 1000 loss: 2.848921298980713
Epoch 59 completed out of 1000 loss: 2.848466396331787
Epoch 60 completed out of 1000 loss: 2.8480154275894165
Epoch 61 completed out of 1000 loss: 2.8475730419158936
Epoch 62 completed out of 1000 loss: 2.8471384048461914
Epoch 63 completed out of 1000 loss: 2.8467050790786743
Epoch 64 completed out of 1000 loss: 2.846271336078644
Epoch 65 completed out of 1000 loss: 2.845837652683258
Epoch 66 completed out of 1000 loss: 2.8454026579856873
Epoch 67 completed out of 1000 loss: 2.844947338104248
Epoch 68 completed out of 1000 loss: 2.8444493412971497
Epoch 69 completed out of 1000 loss: 2.8439393043518066
Epoch 70 completed out of 1000 loss: 2.8434356451034546
Epoch 71 completed out of 1000 loss: 2.8429518342018127
Epoch 72 completed out of 1000 loss: 2.8424506187438965
Epoch 73 completed out of 1000 loss: 2.8419164419174194
Epoch 74 completed out of 1000 loss: 2.8413771390914917
Epoch 75 completed out of 1000 loss: 2.8408414721488953
Epoch 76 completed out of 1000 loss: 2.840372920036316
Epoch 77 completed out of 1000 loss: 2.839918792247772
Epoch 78 completed out of 1000 loss: 2.8394500613212585
Epoch 79 completed out of 1000 loss: 2.838973104953766
Epoch 80 completed out of 1000 loss: 2.838523030281067
Epoch 81 completed out of 1000 loss: 2.838067948818207
Epoch 82 completed out of 1000 loss: 2.8376607298851013
Epoch 83 completed out of 1000 loss: 2.8372552394866943
Epoch 84 completed out of 1000 loss: 2.8368359208106995
Epoch 85 completed out of 1000 loss: 2.836394727230072
Epoch 86 completed out of 1000 loss: 2.835925340652466
Epoch 87 completed out of 1000 loss: 2.835451364517212
Epoch 88 completed out of 1000 loss: 2.835013687610626
Epoch 89 completed out of 1000 loss: 2.8346166610717773
Epoch 90 completed out of 1000 loss: 2.8342031240463257
Epoch 91 completed out of 1000 loss: 2.8337767124176025
Epoch 92 completed out of 1000 loss: 2.8333266377449036
Epoch 93 completed out of 1000 loss: 2.8328250646591187
Epoch 94 completed out of 1000 loss: 2.83229523897171
Epoch 95 completed out of 1000 loss: 2.8318153619766235
Epoch 96 completed out of 1000 loss: 2.831334948539734
Epoch 97 completed out of 1000 loss: 2.830873668193817
Epoch 98 completed out of 1000 loss: 2.830437183380127
Epoch 99 completed out of 1000 loss: 2.8299981355667114
Epoch 100 completed out of 1000 loss: 2.829544961452484
Epoch 101 completed out of 1000 loss: 2.8290550112724304
Epoch 102 completed out of 1000 loss: 2.828588128089905
Epoch 103 completed out of 1000 loss: 2.8281437754631042
Epoch 104 completed out of 1000 loss: 2.8277034163475037
Epoch 105 completed out of 1000 loss: 2.8272692561149597
Epoch 106 completed out of 1000 loss: 2.826836407184601
Epoch 107 completed out of 1000 loss: 2.826406478881836
Epoch 108 completed out of 1000 loss: 2.8259776830673218
Epoch 109 completed out of 1000 loss: 2.825547754764557
Epoch 110 completed out of 1000 loss: 2.8251174688339233
Epoch 111 completed out of 1000 loss: 2.8246888518333435
Epoch 112 completed out of 1000 loss: 2.8242634534835815
Epoch 113 completed out of 1000 loss: 2.8238384127616882
Epoch 114 completed out of 1000 loss: 2.8234108090400696
Epoch 115 completed out of 1000 loss: 2.822985887527466
Epoch 116 completed out of 1000 loss: 2.822559952735901
Epoch 117 completed out of 1000 loss: 2.822137951850891
Epoch 118 completed out of 1000 loss: 2.8217167258262634
Epoch 119 completed out of 1000 loss: 2.82129567861557
Epoch 120 completed out of 1000 loss: 2.8208762407302856
Epoch 121 completed out of 1000 loss: 2.8204562664031982
Epoch 122 completed out of 1000 loss: 2.8200311064720154
Epoch 123 completed out of 1000 loss: 2.8196003437042236
Epoch 124 completed out of 1000 loss: 2.8191505074501038
Epoch 125 completed out of 1000 loss: 2.8186817169189453
Epoch 126 completed out of 1000 loss: 2.8181775212287903
Epoch 127 completed out of 1000 loss: 2.817654013633728
Epoch 128 completed out of 1000 loss: 2.8171340823173523
Epoch 129 completed out of 1000 loss: 2.816587507724762
Epoch 130 completed out of 1000 loss: 2.8160029649734497
Epoch 131 completed out of 1000 loss: 2.8153886795043945
Epoch 132 completed out of 1000 loss: 2.8147873878479004
Epoch 133 completed out of 1000 loss: 2.8142147064208984
Epoch 134 completed out of 1000 loss: 2.813635468482971
Epoch 135 completed out of 1000 loss: 2.813003897666931
Epoch 136 completed out of 1000 loss: 2.8123950958251953
Epoch 137 completed out of 1000 loss: 2.8118200302124023
Epoch 138 completed out of 1000 loss: 2.811294436454773
Epoch 139 completed out of 1000 loss: 2.8108088970184326
Epoch 140 completed out of 1000 loss: 2.8103466629981995
Epoch 141 completed out of 1000 loss: 2.809886634349823
Epoch 142 completed out of 1000 loss: 2.8094277381896973
Epoch 143 completed out of 1000 loss: 2.8089643716812134
Epoch 144 completed out of 1000 loss: 2.8084933161735535
Epoch 145 completed out of 1000 loss: 2.8079856634140015
Epoch 146 completed out of 1000 loss: 2.8074352741241455
Epoch 147 completed out of 1000 loss: 2.8069116473197937
Epoch 148 completed out of 1000 loss: 2.8064093589782715
Epoch 149 completed out of 1000 loss: 2.805915355682373
Epoch 150 completed out of 1000 loss: 2.805424392223358
Epoch 151 completed out of 1000 loss: 2.8049274682998657
Epoch 152 completed out of 1000 loss: 2.804421305656433
Epoch 153 completed out of 1000 loss: 2.8039249777793884
Epoch 154 completed out of 1000 loss: 2.8034444451332092
Epoch 155 completed out of 1000 loss: 2.8029792308807373
Epoch 156 completed out of 1000 loss: 2.8025201559066772
Epoch 157 completed out of 1000 loss: 2.802062451839447
Epoch 158 completed out of 1000 loss: 2.8016069531440735
Epoch 159 completed out of 1000 loss: 2.8011617064476013
Epoch 160 completed out of 1000 loss: 2.8007341027259827
Epoch 161 completed out of 1000 loss: 2.8003153800964355
Epoch 162 completed out of 1000 loss: 2.7998947501182556
Epoch 163 completed out of 1000 loss: 2.7994741797447205
Epoch 164 completed out of 1000 loss: 2.7990609407424927
Epoch 165 completed out of 1000 loss: 2.7986587285995483
Epoch 166 completed out of 1000 loss: 2.7982555627822876
Epoch 167 completed out of 1000 loss: 2.7978599071502686
Epoch 168 completed out of 1000 loss: 2.797469735145569
Epoch 169 completed out of 1000 loss: 2.797081410884857
Epoch 170 completed out of 1000 loss: 2.7967084646224976
Epoch 171 completed out of 1000 loss: 2.796341061592102
Epoch 172 completed out of 1000 loss: 2.7959901690483093
Epoch 173 completed out of 1000 loss: 2.7956554293632507
Epoch 174 completed out of 1000 loss: 2.795344650745392
Epoch 175 completed out of 1000 loss: 2.795048236846924
Epoch 176 completed out of 1000 loss: 2.7947603464126587
Epoch 177 completed out of 1000 loss: 2.7944809198379517
Epoch 178 completed out of 1000 loss: 2.794207453727722
Epoch 179 completed out of 1000 loss: 2.793946087360382
Epoch 180 completed out of 1000 loss: 2.7936936020851135
Epoch 181 completed out of 1000 loss: 2.7934530377388
Epoch 182 completed out of 1000 loss: 2.7932193875312805
Epoch 183 completed out of 1000 loss: 2.7929941415786743
Epoch 184 completed out of 1000 loss: 2.792778789997101
Epoch 185 completed out of 1000 loss: 2.792568624019623
Epoch 186 completed out of 1000 loss: 2.792370855808258
Epoch 187 completed out of 1000 loss: 2.792172908782959
Epoch 188 completed out of 1000 loss: 2.791987657546997
Epoch 189 completed out of 1000 loss: 2.791806936264038
Epoch 190 completed out of 1000 loss: 2.791631519794464
Epoch 191 completed out of 1000 loss: 2.791465163230896
Epoch 192 completed out of 1000 loss: 2.791299521923065
Epoch 193 completed out of 1000 loss: 2.791144549846649
Epoch 194 completed out of 1000 loss: 2.790989875793457
Epoch 195 completed out of 1000 loss: 2.7908408641815186
Epoch 196 completed out of 1000 loss: 2.7906993627548218
Epoch 197 completed out of 1000 loss: 2.790561854839325
Epoch 198 completed out of 1000 loss: 2.79042786359787
Epoch 199 completed out of 1000 loss: 2.7902933955192566
Epoch 200 completed out of 1000 loss: 2.790166199207306
Epoch 201 completed out of 1000 loss: 2.790045142173767
Epoch 202 completed out of 1000 loss: 2.7899200916290283
Epoch 203 completed out of 1000 loss: 2.789803624153137
Epoch 204 completed out of 1000 loss: 2.7896859645843506
Epoch 205 completed out of 1000 loss: 2.78957337141037
Epoch 206 completed out of 1000 loss: 2.789463460445404
Epoch 207 completed out of 1000 loss: 2.789351761341095
Epoch 208 completed out of 1000 loss: 2.7892428636550903
Epoch 209 completed out of 1000 loss: 2.789137542247772
Epoch 210 completed out of 1000 loss: 2.789032816886902
Epoch 211 completed out of 1000 loss: 2.7889262437820435
Epoch 212 completed out of 1000 loss: 2.7888229489326477
Epoch 213 completed out of 1000 loss: 2.7887197136878967
Epoch 214 completed out of 1000 loss: 2.7886141538619995
Epoch 215 completed out of 1000 loss: 2.788507878780365
Epoch 216 completed out of 1000 loss: 2.7884010672569275
Epoch 217 completed out of 1000 loss: 2.788294792175293
Epoch 218 completed out of 1000 loss: 2.7881866693496704
Epoch 219 completed out of 1000 loss: 2.7880779504776
Epoch 220 completed out of 1000 loss: 2.787974536418915
Epoch 221 completed out of 1000 loss: 2.7878717184066772
Epoch 222 completed out of 1000 loss: 2.787776231765747
Epoch 223 completed out of 1000 loss: 2.787684977054596
Epoch 224 completed out of 1000 loss: 2.787596821784973
Epoch 225 completed out of 1000 loss: 2.7875099182128906
Epoch 226 completed out of 1000 loss: 2.787427544593811
Epoch 227 completed out of 1000 loss: 2.7873408794403076
Epoch 228 completed out of 1000 loss: 2.7872594594955444
Epoch 229 completed out of 1000 loss: 2.787178158760071
Epoch 230 completed out of 1000 loss: 2.787096381187439
Epoch 231 completed out of 1000 loss: 2.787014126777649
Epoch 232 completed out of 1000 loss: 2.786931574344635
Epoch 233 completed out of 1000 loss: 2.786848723888397
Epoch 234 completed out of 1000 loss: 2.786767303943634
Epoch 235 completed out of 1000 loss: 2.7866844534873962
Epoch 236 completed out of 1000 loss: 2.7866026759147644
Epoch 237 completed out of 1000 loss: 2.7865185737609863
Epoch 238 completed out of 1000 loss: 2.786439836025238
Epoch 239 completed out of 1000 loss: 2.7863593697547913
Epoch 240 completed out of 1000 loss: 2.786279559135437
Epoch 241 completed out of 1000 loss: 2.786199152469635
Epoch 242 completed out of 1000 loss: 2.786119222640991
Epoch 243 completed out of 1000 loss: 2.786039113998413
Epoch 244 completed out of 1000 loss: 2.7859610319137573
Epoch 245 completed out of 1000 loss: 2.7858816981315613
Epoch 246 completed out of 1000 loss: 2.7858023643493652
Epoch 247 completed out of 1000 loss: 2.785727620124817
Epoch 248 completed out of 1000 loss: 2.785649001598358
Epoch 249 completed out of 1000 loss: 2.7855722308158875
Epoch 250 completed out of 1000 loss: 2.78549587726593
Epoch 251 completed out of 1000 loss: 2.7854220867156982
Epoch 252 completed out of 1000 loss: 2.7853453755378723
Epoch 253 completed out of 1000 loss: 2.785270631313324
Epoch 254 completed out of 1000 loss: 2.7851935029029846
Epoch 255 completed out of 1000 loss: 2.7851191759109497
Epoch 256 completed out of 1000 loss: 2.7850411534309387
Epoch 257 completed out of 1000 loss: 2.7849678993225098
Epoch 258 completed out of 1000 loss: 2.7848896980285645
Epoch 259 completed out of 1000 loss: 2.7848169207572937
Epoch 260 completed out of 1000 loss: 2.7847387194633484
Epoch 261 completed out of 1000 loss: 2.784662365913391
Epoch 262 completed out of 1000 loss: 2.7845871448516846
Epoch 263 completed out of 1000 loss: 2.78451007604599
Epoch 264 completed out of 1000 loss: 2.784432113170624
Epoch 265 completed out of 1000 loss: 2.784354090690613
Epoch 266 completed out of 1000 loss: 2.7842759490013123
Epoch 267 completed out of 1000 loss: 2.784196972846985
Epoch 268 completed out of 1000 loss: 2.784116208553314
Epoch 269 completed out of 1000 loss: 2.784035861492157
Epoch 270 completed out of 1000 loss: 2.783957302570343
Epoch 271 completed out of 1000 loss: 2.7838760018348694
Epoch 272 completed out of 1000 loss: 2.783791184425354
Epoch 273 completed out of 1000 loss: 2.783708393573761
Epoch 274 completed out of 1000 loss: 2.7836248874664307
Epoch 275 completed out of 1000 loss: 2.7835394144058228
Epoch 276 completed out of 1000 loss: 2.7834540605545044
Epoch 277 completed out of 1000 loss: 2.7833643555641174
Epoch 278 completed out of 1000 loss: 2.783276677131653
Epoch 279 completed out of 1000 loss: 2.783185303211212
Epoch 280 completed out of 1000 loss: 2.7830936312675476
Epoch 281 completed out of 1000 loss: 2.7829965353012085
Epoch 282 completed out of 1000 loss: 2.7829038500785828
Epoch 283 completed out of 1000 loss: 2.7828025817871094
Epoch 284 completed out of 1000 loss: 2.782705068588257
Epoch 285 completed out of 1000 loss: 2.782601237297058
Epoch 286 completed out of 1000 loss: 2.782495677471161
Epoch 287 completed out of 1000 loss: 2.782387375831604
Epoch 288 completed out of 1000 loss: 2.782275915145874
Epoch 289 completed out of 1000 loss: 2.7821608185768127
Epoch 290 completed out of 1000 loss: 2.782044768333435
Epoch 291 completed out of 1000 loss: 2.781920611858368
Epoch 292 completed out of 1000 loss: 2.7817923426628113
Epoch 293 completed out of 1000 loss: 2.7816579937934875
Epoch 294 completed out of 1000 loss: 2.781523644924164
Epoch 295 completed out of 1000 loss: 2.781377077102661
Epoch 296 completed out of 1000 loss: 2.7812259197235107
Epoch 297 completed out of 1000 loss: 2.781066060066223
Epoch 298 completed out of 1000 loss: 2.7808967232704163
Epoch 299 completed out of 1000 loss: 2.7807204127311707
Epoch 300 completed out of 1000 loss: 2.780531108379364
Epoch 301 completed out of 1000 loss: 2.7803322672843933
Epoch 302 completed out of 1000 loss: 2.780119299888611
Epoch 303 completed out of 1000 loss: 2.779891312122345
Epoch 304 completed out of 1000 loss: 2.7796462774276733
Epoch 305 completed out of 1000 loss: 2.7793819308280945
Epoch 306 completed out of 1000 loss: 2.779097557067871
Epoch 307 completed out of 1000 loss: 2.7787858843803406
Epoch 308 completed out of 1000 loss: 2.7784449458122253
Epoch 309 completed out of 1000 loss: 2.7780714631080627
Epoch 310 completed out of 1000 loss: 2.7776594161987305
Epoch 311 completed out of 1000 loss: 2.7772005200386047
Epoch 312 completed out of 1000 loss: 2.7766934633255005
Epoch 313 completed out of 1000 loss: 2.7761241793632507
Epoch 314 completed out of 1000 loss: 2.775484263896942
Epoch 315 completed out of 1000 loss: 2.7747612595558167
Epoch 316 completed out of 1000 loss: 2.773948311805725
Epoch 317 completed out of 1000 loss: 2.77302885055542
Epoch 318 completed out of 1000 loss: 2.7719885110855103
Epoch 319 completed out of 1000 loss: 2.770820736885071
Epoch 320 completed out of 1000 loss: 2.7695202231407166
Epoch 321 completed out of 1000 loss: 2.7680927515029907
Epoch 322 completed out of 1000 loss: 2.7665544748306274
Epoch 323 completed out of 1000 loss: 2.764949679374695
Epoch 324 completed out of 1000 loss: 2.763324558734894
Epoch 325 completed out of 1000 loss: 2.7617433071136475
Epoch 326 completed out of 1000 loss: 2.760267674922943
Epoch 327 completed out of 1000 loss: 2.758947432041168
Epoch 328 completed out of 1000 loss: 2.757805049419403
Epoch 329 completed out of 1000 loss: 2.7568450570106506
Epoch 330 completed out of 1000 loss: 2.756048560142517
Epoch 331 completed out of 1000 loss: 2.755392551422119
Epoch 332 completed out of 1000 loss: 2.754848599433899
Epoch 333 completed out of 1000 loss: 2.754392385482788
Epoch 334 completed out of 1000 loss: 2.75400310754776
Epoch 335 completed out of 1000 loss: 2.7536616921424866
Epoch 336 completed out of 1000 loss: 2.7533618807792664
Epoch 337 completed out of 1000 loss: 2.753085970878601
Epoch 338 completed out of 1000 loss: 2.752833366394043
Epoch 339 completed out of 1000 loss: 2.7526037096977234
Epoch 340 completed out of 1000 loss: 2.752388596534729
Epoch 341 completed out of 1000 loss: 2.7521835565567017
Epoch 342 completed out of 1000 loss: 2.7519962787628174
Epoch 343 completed out of 1000 loss: 2.7518213987350464
Epoch 344 completed out of 1000 loss: 2.751657724380493
Epoch 345 completed out of 1000 loss: 2.7515016198158264
Epoch 346 completed out of 1000 loss: 2.7513606548309326
Epoch 347 completed out of 1000 loss: 2.751223623752594
Epoch 348 completed out of 1000 loss: 2.751097023487091
Epoch 349 completed out of 1000 loss: 2.7509766817092896
Epoch 350 completed out of 1000 loss: 2.750864326953888
Epoch 351 completed out of 1000 loss: 2.7507566809654236
Epoch 352 completed out of 1000 loss: 2.750655949115753
Epoch 353 completed out of 1000 loss: 2.7505581378936768
Epoch 354 completed out of 1000 loss: 2.750467360019684
Epoch 355 completed out of 1000 loss: 2.7503784894943237
Epoch 356 completed out of 1000 loss: 2.750296950340271
Epoch 357 completed out of 1000 loss: 2.7502159476280212
Epoch 358 completed out of 1000 loss: 2.7501387000083923
Epoch 359 completed out of 1000 loss: 2.7500638365745544
Epoch 360 completed out of 1000 loss: 2.7499924898147583
Epoch 361 completed out of 1000 loss: 2.749923348426819
Epoch 362 completed out of 1000 loss: 2.749856472015381
Epoch 363 completed out of 1000 loss: 2.7497901916503906
Epoch 364 completed out of 1000 loss: 2.7497280836105347
Epoch 365 completed out of 1000 loss: 2.7496672868728638
Epoch 366 completed out of 1000 loss: 2.7496052384376526
Epoch 367 completed out of 1000 loss: 2.7495482563972473
Epoch 368 completed out of 1000 loss: 2.7494913935661316
Epoch 369 completed out of 1000 loss: 2.749437093734741
Epoch 370 completed out of 1000 loss: 2.749380946159363
Epoch 371 completed out of 1000 loss: 2.7493286728858948
Epoch 372 completed out of 1000 loss: 2.749275267124176
Epoch 373 completed out of 1000 loss: 2.749222993850708
Epoch 374 completed out of 1000 loss: 2.7491716146469116
Epoch 375 completed out of 1000 loss: 2.7491214871406555
Epoch 376 completed out of 1000 loss: 2.7490733861923218
Epoch 377 completed out of 1000 loss: 2.7490254044532776
Epoch 378 completed out of 1000 loss: 2.748975455760956
Epoch 379 completed out of 1000 loss: 2.7489309906959534
Epoch 380 completed out of 1000 loss: 2.748883605003357
Epoch 381 completed out of 1000 loss: 2.7488375306129456
Epoch 382 completed out of 1000 loss: 2.7487932443618774
Epoch 383 completed out of 1000 loss: 2.748747408390045
Epoch 384 completed out of 1000 loss: 2.748702824115753
Epoch 385 completed out of 1000 loss: 2.748660922050476
Epoch 386 completed out of 1000 loss: 2.748616576194763
Epoch 387 completed out of 1000 loss: 2.748573064804077
Epoch 388 completed out of 1000 loss: 2.748530685901642
Epoch 389 completed out of 1000 loss: 2.748486876487732
Epoch 390 completed out of 1000 loss: 2.7484476566314697
Epoch 391 completed out of 1000 loss: 2.748404800891876
Epoch 392 completed out of 1000 loss: 2.748364210128784
Epoch 393 completed out of 1000 loss: 2.7483243346214294
Epoch 394 completed out of 1000 loss: 2.7482844591140747
Epoch 395 completed out of 1000 loss: 2.748243749141693
Epoch 396 completed out of 1000 loss: 2.748203992843628
Epoch 397 completed out of 1000 loss: 2.7481659650802612
Epoch 398 completed out of 1000 loss: 2.748123586177826
Epoch 399 completed out of 1000 loss: 2.748084008693695
Epoch 400 completed out of 1000 loss: 2.748043715953827
Epoch 401 completed out of 1000 loss: 2.748004913330078
Epoch 402 completed out of 1000 loss: 2.747965455055237
Epoch 403 completed out of 1000 loss: 2.747926890850067
Epoch 404 completed out of 1000 loss: 2.7478878498077393
Epoch 405 completed out of 1000 loss: 2.74784916639328
Epoch 406 completed out of 1000 loss: 2.7478116750717163
Epoch 407 completed out of 1000 loss: 2.7477723360061646
Epoch 408 completed out of 1000 loss: 2.7477336525917053
Epoch 409 completed out of 1000 loss: 2.7476948499679565
Epoch 410 completed out of 1000 loss: 2.747654974460602
Epoch 411 completed out of 1000 loss: 2.747617244720459
Epoch 412 completed out of 1000 loss: 2.7475773692131042
Epoch 413 completed out of 1000 loss: 2.7475391626358032
Epoch 414 completed out of 1000 loss: 2.747500717639923
Epoch 415 completed out of 1000 loss: 2.7474613785743713
Epoch 416 completed out of 1000 loss: 2.7474231123924255
Epoch 417 completed out of 1000 loss: 2.7473833560943604
Epoch 418 completed out of 1000 loss: 2.7473424673080444
Epoch 419 completed out of 1000 loss: 2.7473053336143494
Epoch 420 completed out of 1000 loss: 2.7472646832466125
Epoch 421 completed out of 1000 loss: 2.7472264766693115
Epoch 422 completed out of 1000 loss: 2.7471855878829956
Epoch 423 completed out of 1000 loss: 2.7471474409103394
Epoch 424 completed out of 1000 loss: 2.74710613489151
Epoch 425 completed out of 1000 loss: 2.7470663189888
Epoch 426 completed out of 1000 loss: 2.747027099132538
Epoch 427 completed out of 1000 loss: 2.7469868063926697
Epoch 428 completed out of 1000 loss: 2.7469440698623657
Epoch 429 completed out of 1000 loss: 2.7469034790992737
Epoch 430 completed out of 1000 loss: 2.7468605637550354
Epoch 431 completed out of 1000 loss: 2.746821105480194
Epoch 432 completed out of 1000 loss: 2.746779143810272
Epoch 433 completed out of 1000 loss: 2.7467362880706787
Epoch 434 completed out of 1000 loss: 2.7466933727264404
Epoch 435 completed out of 1000 loss: 2.7466508746147156
Epoch 436 completed out of 1000 loss: 2.746605694293976
Epoch 437 completed out of 1000 loss: 2.746564269065857
Epoch 438 completed out of 1000 loss: 2.7465201020240784
Epoch 439 completed out of 1000 loss: 2.7464749813079834
Epoch 440 completed out of 1000 loss: 2.746431052684784
Epoch 441 completed out of 1000 loss: 2.746385335922241
Epoch 442 completed out of 1000 loss: 2.7463406324386597
Epoch 443 completed out of 1000 loss: 2.7462932467460632
Epoch 444 completed out of 1000 loss: 2.7462477684020996
Epoch 445 completed out of 1000 loss: 2.7461986541748047
Epoch 446 completed out of 1000 loss: 2.7461493611335754
Epoch 447 completed out of 1000 loss: 2.746101975440979
Epoch 448 completed out of 1000 loss: 2.74605131149292
Epoch 449 completed out of 1000 loss: 2.7459988594055176
Epoch 450 completed out of 1000 loss: 2.7459476590156555
Epoch 451 completed out of 1000 loss: 2.7458974719047546
Epoch 452 completed out of 1000 loss: 2.7458449006080627
Epoch 453 completed out of 1000 loss: 2.745793044567108
Epoch 454 completed out of 1000 loss: 2.7457399368286133
Epoch 455 completed out of 1000 loss: 2.745687186717987
Epoch 456 completed out of 1000 loss: 2.7456310391426086
Epoch 457 completed out of 1000 loss: 2.7455771565437317
Epoch 458 completed out of 1000 loss: 2.74552059173584
Epoch 459 completed out of 1000 loss: 2.7454668283462524
Epoch 460 completed out of 1000 loss: 2.745410442352295
Epoch 461 completed out of 1000 loss: 2.7453523874282837
Epoch 462 completed out of 1000 loss: 2.7452933192253113
Epoch 463 completed out of 1000 loss: 2.7452357411384583
Epoch 464 completed out of 1000 loss: 2.745177924633026
Epoch 465 completed out of 1000 loss: 2.745119273662567
Epoch 466 completed out of 1000 loss: 2.745056688785553
Epoch 467 completed out of 1000 loss: 2.7449941635131836
Epoch 468 completed out of 1000 loss: 2.744931995868683
Epoch 469 completed out of 1000 loss: 2.744865596294403
Epoch 470 completed out of 1000 loss: 2.7447986602783203
Epoch 471 completed out of 1000 loss: 2.7447317242622375
Epoch 472 completed out of 1000 loss: 2.744661271572113
Epoch 473 completed out of 1000 loss: 2.744594097137451
Epoch 474 completed out of 1000 loss: 2.7445260882377625
Epoch 475 completed out of 1000 loss: 2.7444543838500977
Epoch 476 completed out of 1000 loss: 2.7443830370903015
Epoch 477 completed out of 1000 loss: 2.744313359260559
Epoch 478 completed out of 1000 loss: 2.7442381978034973
Epoch 479 completed out of 1000 loss: 2.7441614270210266
Epoch 480 completed out of 1000 loss: 2.7440854907035828
Epoch 481 completed out of 1000 loss: 2.744007408618927
Epoch 482 completed out of 1000 loss: 2.7439284324645996
Epoch 483 completed out of 1000 loss: 2.7438483238220215
Epoch 484 completed out of 1000 loss: 2.743766486644745
Epoch 485 completed out of 1000 loss: 2.7436856031417847
Epoch 486 completed out of 1000 loss: 2.7436001300811768
Epoch 487 completed out of 1000 loss: 2.74351567029953
Epoch 488 completed out of 1000 loss: 2.7434263229370117
Epoch 489 completed out of 1000 loss: 2.743338704109192
Epoch 490 completed out of 1000 loss: 2.7432459592819214
Epoch 491 completed out of 1000 loss: 2.743151009082794
Epoch 492 completed out of 1000 loss: 2.7430543303489685
Epoch 493 completed out of 1000 loss: 2.742958903312683
Epoch 494 completed out of 1000 loss: 2.742859423160553
Epoch 495 completed out of 1000 loss: 2.7427627444267273
Epoch 496 completed out of 1000 loss: 2.7426616549491882
Epoch 497 completed out of 1000 loss: 2.7425657510757446
Epoch 498 completed out of 1000 loss: 2.742468535900116
Epoch 499 completed out of 1000 loss: 2.742367923259735
Epoch 500 completed out of 1000 loss: 2.7422714829444885
Epoch 501 completed out of 1000 loss: 2.742169439792633
Epoch 502 completed out of 1000 loss: 2.74207466840744
Epoch 503 completed out of 1000 loss: 2.7419760823249817
Epoch 504 completed out of 1000 loss: 2.7418766617774963
Epoch 505 completed out of 1000 loss: 2.741782546043396
Epoch 506 completed out of 1000 loss: 2.7416823506355286
Epoch 507 completed out of 1000 loss: 2.741582453250885
Epoch 508 completed out of 1000 loss: 2.741483986377716
Epoch 509 completed out of 1000 loss: 2.7413856387138367
Epoch 510 completed out of 1000 loss: 2.7412875294685364
Epoch 511 completed out of 1000 loss: 2.7411860823631287
Epoch 512 completed out of 1000 loss: 2.7410863637924194
Epoch 513 completed out of 1000 loss: 2.7409858107566833
Epoch 514 completed out of 1000 loss: 2.740882694721222
Epoch 515 completed out of 1000 loss: 2.740782678127289
Epoch 516 completed out of 1000 loss: 2.7406798601150513
Epoch 517 completed out of 1000 loss: 2.7405776381492615
Epoch 518 completed out of 1000 loss: 2.7404732704162598
Epoch 519 completed out of 1000 loss: 2.740366280078888
Epoch 520 completed out of 1000 loss: 2.740260362625122
Epoch 521 completed out of 1000 loss: 2.740154206752777
Epoch 522 completed out of 1000 loss: 2.740045189857483
Epoch 523 completed out of 1000 loss: 2.739934265613556
Epoch 524 completed out of 1000 loss: 2.7398234009742737
Epoch 525 completed out of 1000 loss: 2.7397117018699646
Epoch 526 completed out of 1000 loss: 2.7395954728126526
Epoch 527 completed out of 1000 loss: 2.7394790053367615
Epoch 528 completed out of 1000 loss: 2.7393606901168823
Epoch 529 completed out of 1000 loss: 2.739242374897003
Epoch 530 completed out of 1000 loss: 2.739118754863739
Epoch 531 completed out of 1000 loss: 2.738993525505066
Epoch 532 completed out of 1000 loss: 2.738870084285736
Epoch 533 completed out of 1000 loss: 2.738737463951111
Epoch 534 completed out of 1000 loss: 2.738608241081238
Epoch 535 completed out of 1000 loss: 2.7384756803512573
Epoch 536 completed out of 1000 loss: 2.738340377807617
Epoch 537 completed out of 1000 loss: 2.7381996512413025
Epoch 538 completed out of 1000 loss: 2.7380611896514893
Epoch 539 completed out of 1000 loss: 2.7379167079925537
Epoch 540 completed out of 1000 loss: 2.7377724051475525
Epoch 541 completed out of 1000 loss: 2.737624764442444
Epoch 542 completed out of 1000 loss: 2.7374765872955322
Epoch 543 completed out of 1000 loss: 2.737323820590973
Epoch 544 completed out of 1000 loss: 2.7371718883514404
Epoch 545 completed out of 1000 loss: 2.7370165586471558
Epoch 546 completed out of 1000 loss: 2.736861765384674
Epoch 547 completed out of 1000 loss: 2.7367040514945984
Epoch 548 completed out of 1000 loss: 2.7365437746047974
Epoch 549 completed out of 1000 loss: 2.7363869547843933
Epoch 550 completed out of 1000 loss: 2.736228108406067
Epoch 551 completed out of 1000 loss: 2.736071765422821
Epoch 552 completed out of 1000 loss: 2.7359145283699036
Epoch 553 completed out of 1000 loss: 2.735758364200592
Epoch 554 completed out of 1000 loss: 2.73560494184494
Epoch 555 completed out of 1000 loss: 2.735453724861145
Epoch 556 completed out of 1000 loss: 2.73530375957489
Epoch 557 completed out of 1000 loss: 2.7351555824279785
Epoch 558 completed out of 1000 loss: 2.7350098490715027
Epoch 559 completed out of 1000 loss: 2.734867513179779
Epoch 560 completed out of 1000 loss: 2.7347291707992554
Epoch 561 completed out of 1000 loss: 2.734595775604248
Epoch 562 completed out of 1000 loss: 2.7344645261764526
Epoch 563 completed out of 1000 loss: 2.7343369722366333
Epoch 564 completed out of 1000 loss: 2.734214723110199
Epoch 565 completed out of 1000 loss: 2.7340985536575317
Epoch 566 completed out of 1000 loss: 2.733982741832733
Epoch 567 completed out of 1000 loss: 2.733874022960663
Epoch 568 completed out of 1000 loss: 2.733768045902252
Epoch 569 completed out of 1000 loss: 2.7336660027503967
Epoch 570 completed out of 1000 loss: 2.733570873737335
Epoch 571 completed out of 1000 loss: 2.733477234840393
Epoch 572 completed out of 1000 loss: 2.733389914035797
Epoch 573 completed out of 1000 loss: 2.733302116394043
Epoch 574 completed out of 1000 loss: 2.7332236766815186
Epoch 575 completed out of 1000 loss: 2.733145236968994
Epoch 576 completed out of 1000 loss: 2.7330719232559204
Epoch 577 completed out of 1000 loss: 2.7329981327056885
Epoch 578 completed out of 1000 loss: 2.7329302430152893
Epoch 579 completed out of 1000 loss: 2.732866048812866
Epoch 580 completed out of 1000 loss: 2.7328025102615356
Epoch 581 completed out of 1000 loss: 2.7327423691749573
Epoch 582 completed out of 1000 loss: 2.7326820492744446
Epoch 583 completed out of 1000 loss: 2.7326282262802124
Epoch 584 completed out of 1000 loss: 2.7325733304023743
Epoch 585 completed out of 1000 loss: 2.732520818710327
Epoch 586 completed out of 1000 loss: 2.7324671745300293
Epoch 587 completed out of 1000 loss: 2.7324150800704956
Epoch 588 completed out of 1000 loss: 2.732364296913147
Epoch 589 completed out of 1000 loss: 2.7323153018951416
Epoch 590 completed out of 1000 loss: 2.7322680950164795
Epoch 591 completed out of 1000 loss: 2.732221841812134
Epoch 592 completed out of 1000 loss: 2.732175648212433
Epoch 593 completed out of 1000 loss: 2.7321274876594543
Epoch 594 completed out of 1000 loss: 2.7320817708969116
Epoch 595 completed out of 1000 loss: 2.732034146785736
Epoch 596 completed out of 1000 loss: 2.731989860534668
Epoch 597 completed out of 1000 loss: 2.731948971748352
Epoch 598 completed out of 1000 loss: 2.731908679008484
Epoch 599 completed out of 1000 loss: 2.731874406337738
Epoch 600 completed out of 1000 loss: 2.731837809085846
Epoch 601 completed out of 1000 loss: 2.731802761554718
Epoch 602 completed out of 1000 loss: 2.7317691445350647
Epoch 603 completed out of 1000 loss: 2.731733798980713
Epoch 604 completed out of 1000 loss: 2.731703460216522
Epoch 605 completed out of 1000 loss: 2.7316720485687256
Epoch 606 completed out of 1000 loss: 2.731640338897705
Epoch 607 completed out of 1000 loss: 2.7316089868545532
Epoch 608 completed out of 1000 loss: 2.7315807342529297
Epoch 609 completed out of 1000 loss: 2.731551766395569
Epoch 610 completed out of 1000 loss: 2.7315255999565125
Epoch 611 completed out of 1000 loss: 2.7314974665641785
Epoch 612 completed out of 1000 loss: 2.7314711213111877
Epoch 613 completed out of 1000 loss: 2.7314453125
Epoch 614 completed out of 1000 loss: 2.7314194440841675
Epoch 615 completed out of 1000 loss: 2.7313954830169678
Epoch 616 completed out of 1000 loss: 2.731370687484741
Epoch 617 completed out of 1000 loss: 2.7313464879989624
Epoch 618 completed out of 1000 loss: 2.731323003768921
Epoch 619 completed out of 1000 loss: 2.7313002943992615
Epoch 620 completed out of 1000 loss: 2.731278121471405
Epoch 621 completed out of 1000 loss: 2.7312567234039307
Epoch 622 completed out of 1000 loss: 2.7312349677085876
Epoch 623 completed out of 1000 loss: 2.731213927268982
Epoch 624 completed out of 1000 loss: 2.7311925888061523
Epoch 625 completed out of 1000 loss: 2.7311729192733765
Epoch 626 completed out of 1000 loss: 2.7311519384384155
Epoch 627 completed out of 1000 loss: 2.7311294078826904
Epoch 628 completed out of 1000 loss: 2.7311131954193115
Epoch 629 completed out of 1000 loss: 2.7310933470726013
Epoch 630 completed out of 1000 loss: 2.7310729026794434
Epoch 631 completed out of 1000 loss: 2.73105525970459
Epoch 632 completed out of 1000 loss: 2.73103529214859
Epoch 633 completed out of 1000 loss: 2.731015622615814
Epoch 634 completed out of 1000 loss: 2.730996012687683
Epoch 635 completed out of 1000 loss: 2.730977773666382
Epoch 636 completed out of 1000 loss: 2.730960249900818
Epoch 637 completed out of 1000 loss: 2.730943441390991
Epoch 638 completed out of 1000 loss: 2.730925977230072
Epoch 639 completed out of 1000 loss: 2.7309077978134155
Epoch 640 completed out of 1000 loss: 2.7308892607688904
Epoch 641 completed out of 1000 loss: 2.730870842933655
Epoch 642 completed out of 1000 loss: 2.7308545112609863
Epoch 643 completed out of 1000 loss: 2.730835437774658
Epoch 644 completed out of 1000 loss: 2.7308179140090942
Epoch 645 completed out of 1000 loss: 2.7307998538017273
Epoch 646 completed out of 1000 loss: 2.7307817935943604
Epoch 647 completed out of 1000 loss: 2.730763614177704
Epoch 648 completed out of 1000 loss: 2.730746030807495
Epoch 649 completed out of 1000 loss: 2.7307296991348267
Epoch 650 completed out of 1000 loss: 2.73071026802063
Epoch 651 completed out of 1000 loss: 2.730691075325012
Epoch 652 completed out of 1000 loss: 2.730673372745514
Epoch 653 completed out of 1000 loss: 2.7306535840034485
Epoch 654 completed out of 1000 loss: 2.7306341528892517
Epoch 655 completed out of 1000 loss: 2.730615556240082
Epoch 656 completed out of 1000 loss: 2.730596661567688
Epoch 657 completed out of 1000 loss: 2.7305766940116882
Epoch 658 completed out of 1000 loss: 2.7305583357810974
Epoch 659 completed out of 1000 loss: 2.730537474155426
Epoch 660 completed out of 1000 loss: 2.7305179834365845
Epoch 661 completed out of 1000 loss: 2.7304998636245728
Epoch 662 completed out of 1000 loss: 2.7304813265800476
Epoch 663 completed out of 1000 loss: 2.7304614186286926
Epoch 664 completed out of 1000 loss: 2.730443000793457
Epoch 665 completed out of 1000 loss: 2.730424642562866
Epoch 666 completed out of 1000 loss: 2.730405628681183
Epoch 667 completed out of 1000 loss: 2.730385899543762
Epoch 668 completed out of 1000 loss: 2.7303649187088013
Epoch 669 completed out of 1000 loss: 2.7303450107574463
Epoch 670 completed out of 1000 loss: 2.7303256392478943
Epoch 671 completed out of 1000 loss: 2.730308771133423
Epoch 672 completed out of 1000 loss: 2.730290710926056
Epoch 673 completed out of 1000 loss: 2.730273485183716
Epoch 674 completed out of 1000 loss: 2.730257213115692
Epoch 675 completed out of 1000 loss: 2.73024320602417
Epoch 676 completed out of 1000 loss: 2.7302276492118835
Epoch 677 completed out of 1000 loss: 2.7302143573760986
Epoch 678 completed out of 1000 loss: 2.7302008271217346
Epoch 679 completed out of 1000 loss: 2.7301847338676453
Epoch 680 completed out of 1000 loss: 2.7301701307296753
Epoch 681 completed out of 1000 loss: 2.7301580905914307
Epoch 682 completed out of 1000 loss: 2.730144441127777
Epoch 683 completed out of 1000 loss: 2.7301315665245056
Epoch 684 completed out of 1000 loss: 2.7301188707351685
Epoch 685 completed out of 1000 loss: 2.730106472969055
Epoch 686 completed out of 1000 loss: 2.7300920486450195
Epoch 687 completed out of 1000 loss: 2.730080723762512
Epoch 688 completed out of 1000 loss: 2.730068027973175
Epoch 689 completed out of 1000 loss: 2.7300546765327454
Epoch 690 completed out of 1000 loss: 2.7300427556037903
Epoch 691 completed out of 1000 loss: 2.730031728744507
Epoch 692 completed out of 1000 loss: 2.730019509792328
Epoch 693 completed out of 1000 loss: 2.730006754398346
Epoch 694 completed out of 1000 loss: 2.729993224143982
Epoch 695 completed out of 1000 loss: 2.7299826741218567
Epoch 696 completed out of 1000 loss: 2.7299710512161255
Epoch 697 completed out of 1000 loss: 2.729959011077881
Epoch 698 completed out of 1000 loss: 2.7299469709396362
Epoch 699 completed out of 1000 loss: 2.7299373745918274
Epoch 700 completed out of 1000 loss: 2.729926645755768
Epoch 701 completed out of 1000 loss: 2.729914128780365
Epoch 702 completed out of 1000 loss: 2.7299057245254517
Epoch 703 completed out of 1000 loss: 2.729893386363983
Epoch 704 completed out of 1000 loss: 2.7298826575279236
Epoch 705 completed out of 1000 loss: 2.729867935180664
Epoch 706 completed out of 1000 loss: 2.7298574447631836
Epoch 707 completed out of 1000 loss: 2.729848086833954
Epoch 708 completed out of 1000 loss: 2.729834735393524
Epoch 709 completed out of 1000 loss: 2.7298237681388855
Epoch 710 completed out of 1000 loss: 2.729813873767853
Epoch 711 completed out of 1000 loss: 2.729804277420044
Epoch 712 completed out of 1000 loss: 2.729792296886444
Epoch 713 completed out of 1000 loss: 2.729780375957489
Epoch 714 completed out of 1000 loss: 2.7297685742378235
Epoch 715 completed out of 1000 loss: 2.7297605872154236
Epoch 716 completed out of 1000 loss: 2.72974956035614
Epoch 717 completed out of 1000 loss: 2.729737162590027
Epoch 718 completed out of 1000 loss: 2.7297253012657166
Epoch 719 completed out of 1000 loss: 2.7297149300575256
Epoch 720 completed out of 1000 loss: 2.7297038435935974
Epoch 721 completed out of 1000 loss: 2.729694128036499
Epoch 722 completed out of 1000 loss: 2.7296828627586365
Epoch 723 completed out of 1000 loss: 2.7296740412712097
Epoch 724 completed out of 1000 loss: 2.7296623587608337
Epoch 725 completed out of 1000 loss: 2.72965270280838
Epoch 726 completed out of 1000 loss: 2.729640781879425
Epoch 727 completed out of 1000 loss: 2.729630708694458
Epoch 728 completed out of 1000 loss: 2.7296180725097656
Epoch 729 completed out of 1000 loss: 2.7296077013015747
Epoch 730 completed out of 1000 loss: 2.729596197605133
Epoch 731 completed out of 1000 loss: 2.729587435722351
Epoch 732 completed out of 1000 loss: 2.729577124118805
Epoch 733 completed out of 1000 loss: 2.729566752910614
Epoch 734 completed out of 1000 loss: 2.7295531630516052
Epoch 735 completed out of 1000 loss: 2.7295461893081665
Epoch 736 completed out of 1000 loss: 2.729535937309265
Epoch 737 completed out of 1000 loss: 2.729523718357086
Epoch 738 completed out of 1000 loss: 2.7295128107070923
Epoch 739 completed out of 1000 loss: 2.729502320289612
Epoch 740 completed out of 1000 loss: 2.7294927835464478
Epoch 741 completed out of 1000 loss: 2.729480564594269
Epoch 742 completed out of 1000 loss: 2.7294703125953674
Epoch 743 completed out of 1000 loss: 2.729454755783081
Epoch 744 completed out of 1000 loss: 2.7294450998306274
Epoch 745 completed out of 1000 loss: 2.7294328212738037
Epoch 746 completed out of 1000 loss: 2.729420006275177
Epoch 747 completed out of 1000 loss: 2.729404389858246
Epoch 748 completed out of 1000 loss: 2.729387402534485
Epoch 749 completed out of 1000 loss: 2.7293694615364075
Epoch 750 completed out of 1000 loss: 2.729341745376587
Epoch 751 completed out of 1000 loss: 2.7292776107788086
Epoch 752 completed out of 1000 loss: 2.729244351387024
Epoch 753 completed out of 1000 loss: 2.7292346954345703
Epoch 754 completed out of 1000 loss: 2.729216516017914
Epoch 755 completed out of 1000 loss: 2.7291901111602783
Epoch 756 completed out of 1000 loss: 2.7291690707206726
Epoch 757 completed out of 1000 loss: 2.7291552424430847
Epoch 758 completed out of 1000 loss: 2.7291430234909058
Epoch 759 completed out of 1000 loss: 2.7291260957717896
Epoch 760 completed out of 1000 loss: 2.729110062122345
Epoch 761 completed out of 1000 loss: 2.729098856449127
Epoch 762 completed out of 1000 loss: 2.7290879487991333
Epoch 763 completed out of 1000 loss: 2.729073941707611
Epoch 764 completed out of 1000 loss: 2.7290627360343933
Epoch 765 completed out of 1000 loss: 2.729049563407898
Epoch 766 completed out of 1000 loss: 2.729038178920746
Epoch 767 completed out of 1000 loss: 2.729024350643158
Epoch 768 completed out of 1000 loss: 2.729011654853821
Epoch 769 completed out of 1000 loss: 2.728999137878418
Epoch 770 completed out of 1000 loss: 2.7289857864379883
Epoch 771 completed out of 1000 loss: 2.7289730310440063
Epoch 772 completed out of 1000 loss: 2.7289570569992065
Epoch 773 completed out of 1000 loss: 2.7289422750473022
Epoch 774 completed out of 1000 loss: 2.7289289236068726
Epoch 775 completed out of 1000 loss: 2.728913128376007
Epoch 776 completed out of 1000 loss: 2.7289037704467773
Epoch 777 completed out of 1000 loss: 2.7288907766342163
Epoch 778 completed out of 1000 loss: 2.7288808822631836
Epoch 779 completed out of 1000 loss: 2.728867471218109
Epoch 780 completed out of 1000 loss: 2.7288577556610107
Epoch 781 completed out of 1000 loss: 2.728845179080963
Epoch 782 completed out of 1000 loss: 2.72883403301239
Epoch 783 completed out of 1000 loss: 2.7288225293159485
Epoch 784 completed out of 1000 loss: 2.728810966014862
Epoch 785 completed out of 1000 loss: 2.7287989258766174
Epoch 786 completed out of 1000 loss: 2.7287859320640564
Epoch 787 completed out of 1000 loss: 2.7287724018096924
Epoch 788 completed out of 1000 loss: 2.7287628650665283
Epoch 789 completed out of 1000 loss: 2.7287490367889404
Epoch 790 completed out of 1000 loss: 2.7287351489067078
Epoch 791 completed out of 1000 loss: 2.7287221550941467
Epoch 792 completed out of 1000 loss: 2.728703558444977
Epoch 793 completed out of 1000 loss: 2.728685677051544
Epoch 794 completed out of 1000 loss: 2.7286698818206787
Epoch 795 completed out of 1000 loss: 2.728655159473419
Epoch 796 completed out of 1000 loss: 2.7286435961723328
Epoch 797 completed out of 1000 loss: 2.7286320328712463
Epoch 798 completed out of 1000 loss: 2.7286182641983032
Epoch 799 completed out of 1000 loss: 2.728606700897217
Epoch 800 completed out of 1000 loss: 2.7285919785499573
Epoch 801 completed out of 1000 loss: 2.72857803106308
Epoch 802 completed out of 1000 loss: 2.728566586971283
Epoch 803 completed out of 1000 loss: 2.7285544872283936
Epoch 804 completed out of 1000 loss: 2.7285420894622803
Epoch 805 completed out of 1000 loss: 2.728528320789337
Epoch 806 completed out of 1000 loss: 2.728516161441803
Epoch 807 completed out of 1000 loss: 2.7285041213035583
Epoch 808 completed out of 1000 loss: 2.728491425514221
Epoch 809 completed out of 1000 loss: 2.728482186794281
Epoch 810 completed out of 1000 loss: 2.728469491004944
Epoch 811 completed out of 1000 loss: 2.7284562587738037
Epoch 812 completed out of 1000 loss: 2.728442907333374
Epoch 813 completed out of 1000 loss: 2.728433668613434
Epoch 814 completed out of 1000 loss: 2.728421211242676
Epoch 815 completed out of 1000 loss: 2.7284092903137207
Epoch 816 completed out of 1000 loss: 2.7283979654312134
Epoch 817 completed out of 1000 loss: 2.7283865213394165
Epoch 818 completed out of 1000 loss: 2.72837632894516
Epoch 819 completed out of 1000 loss: 2.728365480899811
Epoch 820 completed out of 1000 loss: 2.72835111618042
Epoch 821 completed out of 1000 loss: 2.7283408641815186
Epoch 822 completed out of 1000 loss: 2.728329598903656
Epoch 823 completed out of 1000 loss: 2.72831928730011
Epoch 824 completed out of 1000 loss: 2.7283061146736145
Epoch 825 completed out of 1000 loss: 2.728296935558319
Epoch 826 completed out of 1000 loss: 2.7282851338386536
Epoch 827 completed out of 1000 loss: 2.7282734513282776
Epoch 828 completed out of 1000 loss: 2.7282626628875732
Epoch 829 completed out of 1000 loss: 2.7282503247261047
Epoch 830 completed out of 1000 loss: 2.7282389998435974
Epoch 831 completed out of 1000 loss: 2.7282280921936035
Epoch 832 completed out of 1000 loss: 2.728215992450714
Epoch 833 completed out of 1000 loss: 2.7282036542892456
Epoch 834 completed out of 1000 loss: 2.728189766407013
Epoch 835 completed out of 1000 loss: 2.728178560733795
Epoch 836 completed out of 1000 loss: 2.728166341781616
Epoch 837 completed out of 1000 loss: 2.728150427341461
Epoch 838 completed out of 1000 loss: 2.728135108947754
Epoch 839 completed out of 1000 loss: 2.7281203269958496
Epoch 840 completed out of 1000 loss: 2.728108048439026
Epoch 841 completed out of 1000 loss: 2.728093445301056
Epoch 842 completed out of 1000 loss: 2.7280797362327576
Epoch 843 completed out of 1000 loss: 2.728066861629486
Epoch 844 completed out of 1000 loss: 2.7280534505844116
Epoch 845 completed out of 1000 loss: 2.7280405163764954
Epoch 846 completed out of 1000 loss: 2.7280284762382507
Epoch 847 completed out of 1000 loss: 2.728018045425415
Epoch 848 completed out of 1000 loss: 2.728005826473236
Epoch 849 completed out of 1000 loss: 2.727992832660675
Epoch 850 completed out of 1000 loss: 2.7279820442199707
Epoch 851 completed out of 1000 loss: 2.7279704809188843
Epoch 852 completed out of 1000 loss: 2.7279601097106934
Epoch 853 completed out of 1000 loss: 2.72794646024704
Epoch 854 completed out of 1000 loss: 2.727935254573822
Epoch 855 completed out of 1000 loss: 2.7279250621795654
Epoch 856 completed out of 1000 loss: 2.7279112339019775
Epoch 857 completed out of 1000 loss: 2.7279037833213806
Epoch 858 completed out of 1000 loss: 2.727889597415924
Epoch 859 completed out of 1000 loss: 2.727877199649811
Epoch 860 completed out of 1000 loss: 2.727868676185608
Epoch 861 completed out of 1000 loss: 2.7278568744659424
Epoch 862 completed out of 1000 loss: 2.727846920490265
Epoch 863 completed out of 1000 loss: 2.7278342247009277
Epoch 864 completed out of 1000 loss: 2.7278263568878174
Epoch 865 completed out of 1000 loss: 2.727812707424164
Epoch 866 completed out of 1000 loss: 2.7278031706809998
Epoch 867 completed out of 1000 loss: 2.7277926802635193
Epoch 868 completed out of 1000 loss: 2.727781355381012
Epoch 869 completed out of 1000 loss: 2.727771520614624
Epoch 870 completed out of 1000 loss: 2.72776061296463
Epoch 871 completed out of 1000 loss: 2.7277482748031616
Epoch 872 completed out of 1000 loss: 2.727738678455353
Epoch 873 completed out of 1000 loss: 2.727727472782135
Epoch 874 completed out of 1000 loss: 2.727717101573944
Epoch 875 completed out of 1000 loss: 2.727706551551819
Epoch 876 completed out of 1000 loss: 2.7276933789253235
Epoch 877 completed out of 1000 loss: 2.727682590484619
Epoch 878 completed out of 1000 loss: 2.7276737689971924
Epoch 879 completed out of 1000 loss: 2.72766375541687
Epoch 880 completed out of 1000 loss: 2.727652847766876
Epoch 881 completed out of 1000 loss: 2.727640748023987
Epoch 882 completed out of 1000 loss: 2.727631151676178
Epoch 883 completed out of 1000 loss: 2.7276208996772766
Epoch 884 completed out of 1000 loss: 2.7276106476783752
Epoch 885 completed out of 1000 loss: 2.7276000380516052
Epoch 886 completed out of 1000 loss: 2.7275867462158203
Epoch 887 completed out of 1000 loss: 2.727578043937683
Epoch 888 completed out of 1000 loss: 2.72756826877594
Epoch 889 completed out of 1000 loss: 2.727557897567749
Epoch 890 completed out of 1000 loss: 2.7275447249412537
Epoch 891 completed out of 1000 loss: 2.727532386779785
Epoch 892 completed out of 1000 loss: 2.727525532245636
Epoch 893 completed out of 1000 loss: 2.727513372898102
Epoch 894 completed out of 1000 loss: 2.7275025844573975
Epoch 895 completed out of 1000 loss: 2.727492094039917
Epoch 896 completed out of 1000 loss: 2.727480947971344
Epoch 897 completed out of 1000 loss: 2.7274705171585083
Epoch 898 completed out of 1000 loss: 2.727457880973816
Epoch 899 completed out of 1000 loss: 2.727450132369995
Epoch 900 completed out of 1000 loss: 2.7274370193481445
Epoch 901 completed out of 1000 loss: 2.727427363395691
Epoch 902 completed out of 1000 loss: 2.727417469024658
Epoch 903 completed out of 1000 loss: 2.727404832839966
Epoch 904 completed out of 1000 loss: 2.7273954153060913
Epoch 905 completed out of 1000 loss: 2.727384090423584
Epoch 906 completed out of 1000 loss: 2.7273733019828796
Epoch 907 completed out of 1000 loss: 2.727362334728241
Epoch 908 completed out of 1000 loss: 2.727351486682892
Epoch 909 completed out of 1000 loss: 2.7273396253585815
Epoch 910 completed out of 1000 loss: 2.727328836917877
Epoch 911 completed out of 1000 loss: 2.72731751203537
Epoch 912 completed out of 1000 loss: 2.7273078560829163
Epoch 913 completed out of 1000 loss: 2.7272943258285522
Epoch 914 completed out of 1000 loss: 2.72728431224823
Epoch 915 completed out of 1000 loss: 2.7272714972496033
Epoch 916 completed out of 1000 loss: 2.727263569831848
Epoch 917 completed out of 1000 loss: 2.727250337600708
Epoch 918 completed out of 1000 loss: 2.727238118648529
Epoch 919 completed out of 1000 loss: 2.727228820323944
Epoch 920 completed out of 1000 loss: 2.7272173166275024
Epoch 921 completed out of 1000 loss: 2.7272056341171265
Epoch 922 completed out of 1000 loss: 2.7271904349327087
Epoch 923 completed out of 1000 loss: 2.7271804213523865
Epoch 924 completed out of 1000 loss: 2.7271710634231567
Epoch 925 completed out of 1000 loss: 2.7271567583084106
Epoch 926 completed out of 1000 loss: 2.7271445393562317
Epoch 927 completed out of 1000 loss: 2.7271326780319214
Epoch 928 completed out of 1000 loss: 2.727121412754059
Epoch 929 completed out of 1000 loss: 2.727107346057892
Epoch 930 completed out of 1000 loss: 2.727096915245056
Epoch 931 completed out of 1000 loss: 2.727084994316101
Epoch 932 completed out of 1000 loss: 2.727072298526764
Epoch 933 completed out of 1000 loss: 2.7270607948303223
Epoch 934 completed out of 1000 loss: 2.727047860622406
Epoch 935 completed out of 1000 loss: 2.727033793926239
Epoch 936 completed out of 1000 loss: 2.727023124694824
Epoch 937 completed out of 1000 loss: 2.72700959444046
Epoch 938 completed out of 1000 loss: 2.726995885372162
Epoch 939 completed out of 1000 loss: 2.7269840240478516
Epoch 940 completed out of 1000 loss: 2.72697114944458
Epoch 941 completed out of 1000 loss: 2.7269570231437683
Epoch 942 completed out of 1000 loss: 2.7269424200057983
Epoch 943 completed out of 1000 loss: 2.726930022239685
Epoch 944 completed out of 1000 loss: 2.7269155979156494
Epoch 945 completed out of 1000 loss: 2.7269009351730347
Epoch 946 completed out of 1000 loss: 2.726887881755829
Epoch 947 completed out of 1000 loss: 2.726873457431793
Epoch 948 completed out of 1000 loss: 2.7268587350845337
Epoch 949 completed out of 1000 loss: 2.726843297481537
Epoch 950 completed out of 1000 loss: 2.7268304228782654
Epoch 951 completed out of 1000 loss: 2.7268142700195312
Epoch 952 completed out of 1000 loss: 2.7267998456954956
Epoch 953 completed out of 1000 loss: 2.7267841696739197
Epoch 954 completed out of 1000 loss: 2.726767897605896
Epoch 955 completed out of 1000 loss: 2.7267499566078186
Epoch 956 completed out of 1000 loss: 2.7267367243766785
Epoch 957 completed out of 1000 loss: 2.726720690727234
Epoch 958 completed out of 1000 loss: 2.726705551147461
Epoch 959 completed out of 1000 loss: 2.7266897559165955
Epoch 960 completed out of 1000 loss: 2.7266751527786255
Epoch 961 completed out of 1000 loss: 2.7266581654548645
Epoch 962 completed out of 1000 loss: 2.7266428470611572
Epoch 963 completed out of 1000 loss: 2.7266279458999634
Epoch 964 completed out of 1000 loss: 2.7266119718551636
Epoch 965 completed out of 1000 loss: 2.726595103740692
Epoch 966 completed out of 1000 loss: 2.726581037044525
Epoch 967 completed out of 1000 loss: 2.7265648245811462
Epoch 968 completed out of 1000 loss: 2.726547956466675
Epoch 969 completed out of 1000 loss: 2.7265336513519287
Epoch 970 completed out of 1000 loss: 2.726514518260956
Epoch 971 completed out of 1000 loss: 2.7264984250068665
Epoch 972 completed out of 1000 loss: 2.7264830470085144
Epoch 973 completed out of 1000 loss: 2.726466178894043
Epoch 974 completed out of 1000 loss: 2.7264504432678223
Epoch 975 completed out of 1000 loss: 2.726431369781494
Epoch 976 completed out of 1000 loss: 2.7264153957366943
Epoch 977 completed out of 1000 loss: 2.726399540901184
Epoch 978 completed out of 1000 loss: 2.7263819575309753
Epoch 979 completed out of 1000 loss: 2.7263644337654114
Epoch 980 completed out of 1000 loss: 2.726344645023346
Epoch 981 completed out of 1000 loss: 2.726328134536743
Epoch 982 completed out of 1000 loss: 2.726311504840851
Epoch 983 completed out of 1000 loss: 2.7262903451919556
Epoch 984 completed out of 1000 loss: 2.7262723445892334
Epoch 985 completed out of 1000 loss: 2.7262524366378784
Epoch 986 completed out of 1000 loss: 2.7262332439422607
Epoch 987 completed out of 1000 loss: 2.7262133359909058
Epoch 988 completed out of 1000 loss: 2.726194739341736
Epoch 989 completed out of 1000 loss: 2.7261754870414734
Epoch 990 completed out of 1000 loss: 2.7261536717414856
Epoch 991 completed out of 1000 loss: 2.7261326909065247
Epoch 992 completed out of 1000 loss: 2.7261111736297607
Epoch 993 completed out of 1000 loss: 2.7260913252830505
Epoch 994 completed out of 1000 loss: 2.726068913936615
Epoch 995 completed out of 1000 loss: 2.726043403148651
Epoch 996 completed out of 1000 loss: 2.7260189056396484
Epoch 997 completed out of 1000 loss: 2.7259976267814636
Epoch 998 completed out of 1000 loss: 2.7259730100631714
Epoch 999 completed out of 1000 loss: 2.725946843624115
Tensor("Softmax_2:0", shape=(?, 2), dtype=float32)
[[0.17966399 0.82033604]
 [0.42574254 0.57425743]
 [0.8862235  0.11377653]
 ...
 [0.51942587 0.48057416]
 [0.48649997 0.5135001 ]
 [0.48778477 0.5122152 ]]
False
accuracy on train: 0.57087994
Accuracy on test : 0.5592382
[[0. 1.]
 [0. 1.]
 [1. 0.]
 ...
 [0. 1.]
 [0. 1.]
 [0. 1.]]
In [118]:
cm8 = confusion_matrix(df_yc,predy3)
plt.figure(figsize = (10,7))
cm8df = pd.DataFrame(cm8, index = ["1",'0'], columns = ["1",'0'])
seaborn.set(font_scale=1.4)#for label size
seaborn.heatmap(cm8df, annot=True,annot_kws={"size": 16})# font size
Out[118]:
<matplotlib.axes._subplots.AxesSubplot at 0x20f0c752e88>
In [119]:
fpr, tpr, _ = roc_curve(predy3, df_yc, drop_intermediate=False)
plt.plot(fpr, tpr, color='red')
plt.xlabel('fpr')
plt.ylabel('tpr')
plt.title('ROC curve fr Tensorflow 3')
plt.show()
In [120]:
losslist4,predy4 = train_neural_network2(x, dfabce, df_yabceten, dfd, df_ydten)
predy4=np.argmax(predy4,axis=1)
Epoch 0 completed out of 1000 loss: 3.2694172859191895
Epoch 1 completed out of 1000 loss: 3.2650516629219055
Epoch 2 completed out of 1000 loss: 3.2607805132865906
Epoch 3 completed out of 1000 loss: 3.2566741704940796
Epoch 4 completed out of 1000 loss: 3.252859652042389
Epoch 5 completed out of 1000 loss: 3.249247968196869
Epoch 6 completed out of 1000 loss: 3.2456971406936646
Epoch 7 completed out of 1000 loss: 3.242029845714569
Epoch 8 completed out of 1000 loss: 3.2381789088249207
Epoch 9 completed out of 1000 loss: 3.2342768907546997
Epoch 10 completed out of 1000 loss: 3.230494439601898
Epoch 11 completed out of 1000 loss: 3.2269200682640076
Epoch 12 completed out of 1000 loss: 3.2234086990356445
Epoch 13 completed out of 1000 loss: 3.2195246815681458
Epoch 14 completed out of 1000 loss: 3.2151577472686768
Epoch 15 completed out of 1000 loss: 3.210892379283905
Epoch 16 completed out of 1000 loss: 3.206506848335266
Epoch 17 completed out of 1000 loss: 3.2022129893302917
Epoch 18 completed out of 1000 loss: 3.197977900505066
Epoch 19 completed out of 1000 loss: 3.194095730781555
Epoch 20 completed out of 1000 loss: 3.19006609916687
Epoch 21 completed out of 1000 loss: 3.185913562774658
Epoch 22 completed out of 1000 loss: 3.1815192699432373
Epoch 23 completed out of 1000 loss: 3.1769714951515198
Epoch 24 completed out of 1000 loss: 3.173051118850708
Epoch 25 completed out of 1000 loss: 3.1694119572639465
Epoch 26 completed out of 1000 loss: 3.1654069423675537
Epoch 27 completed out of 1000 loss: 3.1612104773521423
Epoch 28 completed out of 1000 loss: 3.157111942768097
Epoch 29 completed out of 1000 loss: 3.153152883052826
Epoch 30 completed out of 1000 loss: 3.1485097408294678
Epoch 31 completed out of 1000 loss: 3.1437954902648926
Epoch 32 completed out of 1000 loss: 3.1397682428359985
Epoch 33 completed out of 1000 loss: 3.135853886604309
Epoch 34 completed out of 1000 loss: 3.1319323778152466
Epoch 35 completed out of 1000 loss: 3.128180503845215
Epoch 36 completed out of 1000 loss: 3.124616265296936
Epoch 37 completed out of 1000 loss: 3.1212267875671387
Epoch 38 completed out of 1000 loss: 3.1178662180900574
Epoch 39 completed out of 1000 loss: 3.1144310235977173
Epoch 40 completed out of 1000 loss: 3.110675573348999
Epoch 41 completed out of 1000 loss: 3.106978952884674
Epoch 42 completed out of 1000 loss: 3.1034310460090637
Epoch 43 completed out of 1000 loss: 3.0994668006896973
Epoch 44 completed out of 1000 loss: 3.0952744483947754
Epoch 45 completed out of 1000 loss: 3.0912477374076843
Epoch 46 completed out of 1000 loss: 3.087263822555542
Epoch 47 completed out of 1000 loss: 3.0834655165672302
Epoch 48 completed out of 1000 loss: 3.079643130302429
Epoch 49 completed out of 1000 loss: 3.075841546058655
Epoch 50 completed out of 1000 loss: 3.0718806385993958
Epoch 51 completed out of 1000 loss: 3.06773978471756
Epoch 52 completed out of 1000 loss: 3.063806414604187
Epoch 53 completed out of 1000 loss: 3.0597009658813477
Epoch 54 completed out of 1000 loss: 3.0556376576423645
Epoch 55 completed out of 1000 loss: 3.051675319671631
Epoch 56 completed out of 1000 loss: 3.04787141084671
Epoch 57 completed out of 1000 loss: 3.044309079647064
Epoch 58 completed out of 1000 loss: 3.0410404205322266
Epoch 59 completed out of 1000 loss: 3.037888288497925
Epoch 60 completed out of 1000 loss: 3.034761428833008
Epoch 61 completed out of 1000 loss: 3.0316784977912903
Epoch 62 completed out of 1000 loss: 3.028651714324951
Epoch 63 completed out of 1000 loss: 3.025611698627472
Epoch 64 completed out of 1000 loss: 3.0225375294685364
Epoch 65 completed out of 1000 loss: 3.0194604992866516
Epoch 66 completed out of 1000 loss: 3.016661047935486
Epoch 67 completed out of 1000 loss: 3.014263331890106
Epoch 68 completed out of 1000 loss: 3.011725127696991
Epoch 69 completed out of 1000 loss: 3.0092039108276367
Epoch 70 completed out of 1000 loss: 3.0066375732421875
Epoch 71 completed out of 1000 loss: 3.003841519355774
Epoch 72 completed out of 1000 loss: 3.00049889087677
Epoch 73 completed out of 1000 loss: 2.9973137974739075
Epoch 74 completed out of 1000 loss: 2.9947808384895325
Epoch 75 completed out of 1000 loss: 2.992561459541321
Epoch 76 completed out of 1000 loss: 2.989992678165436
Epoch 77 completed out of 1000 loss: 2.987397313117981
Epoch 78 completed out of 1000 loss: 2.9844436049461365
Epoch 79 completed out of 1000 loss: 2.9825520515441895
Epoch 80 completed out of 1000 loss: 2.9808462858200073
Epoch 81 completed out of 1000 loss: 2.9789887070655823
Epoch 82 completed out of 1000 loss: 2.9770665168762207
Epoch 83 completed out of 1000 loss: 2.975446581840515
Epoch 84 completed out of 1000 loss: 2.974063515663147
Epoch 85 completed out of 1000 loss: 2.9726786017417908
Epoch 86 completed out of 1000 loss: 2.9713083505630493
Epoch 87 completed out of 1000 loss: 2.9699236154556274
Epoch 88 completed out of 1000 loss: 2.968701660633087
Epoch 89 completed out of 1000 loss: 2.967664659023285
Epoch 90 completed out of 1000 loss: 2.966605544090271
Epoch 91 completed out of 1000 loss: 2.9656513929367065
Epoch 92 completed out of 1000 loss: 2.9646806120872498
Epoch 93 completed out of 1000 loss: 2.9636210799217224
Epoch 94 completed out of 1000 loss: 2.9624425172805786
Epoch 95 completed out of 1000 loss: 2.961041212081909
Epoch 96 completed out of 1000 loss: 2.959363579750061
Epoch 97 completed out of 1000 loss: 2.958087146282196
Epoch 98 completed out of 1000 loss: 2.957195281982422
Epoch 99 completed out of 1000 loss: 2.956209659576416
Epoch 100 completed out of 1000 loss: 2.955104649066925
Epoch 101 completed out of 1000 loss: 2.9541720747947693
Epoch 102 completed out of 1000 loss: 2.9532413482666016
Epoch 103 completed out of 1000 loss: 2.9522481560707092
Epoch 104 completed out of 1000 loss: 2.9512524604797363
Epoch 105 completed out of 1000 loss: 2.9504764676094055
Epoch 106 completed out of 1000 loss: 2.9498267769813538
Epoch 107 completed out of 1000 loss: 2.9490986466407776
Epoch 108 completed out of 1000 loss: 2.948114812374115
Epoch 109 completed out of 1000 loss: 2.9468005299568176
Epoch 110 completed out of 1000 loss: 2.945500910282135
Epoch 111 completed out of 1000 loss: 2.944521427154541
Epoch 112 completed out of 1000 loss: 2.9433568120002747
Epoch 113 completed out of 1000 loss: 2.9422484040260315
Epoch 114 completed out of 1000 loss: 2.9411641359329224
Epoch 115 completed out of 1000 loss: 2.940169930458069
Epoch 116 completed out of 1000 loss: 2.9391945004463196
Epoch 117 completed out of 1000 loss: 2.9382861256599426
Epoch 118 completed out of 1000 loss: 2.9374343752861023
Epoch 119 completed out of 1000 loss: 2.936637043952942
Epoch 120 completed out of 1000 loss: 2.935859262943268
Epoch 121 completed out of 1000 loss: 2.93513822555542
Epoch 122 completed out of 1000 loss: 2.934430778026581
Epoch 123 completed out of 1000 loss: 2.933677554130554
Epoch 124 completed out of 1000 loss: 2.9328958988189697
Epoch 125 completed out of 1000 loss: 2.9320359230041504
Epoch 126 completed out of 1000 loss: 2.9310519695281982
Epoch 127 completed out of 1000 loss: 2.929910123348236
Epoch 128 completed out of 1000 loss: 2.9289018511772156
Epoch 129 completed out of 1000 loss: 2.9280452728271484
Epoch 130 completed out of 1000 loss: 2.927211344242096
Epoch 131 completed out of 1000 loss: 2.9263983368873596
Epoch 132 completed out of 1000 loss: 2.92558616399765
Epoch 133 completed out of 1000 loss: 2.924802541732788
Epoch 134 completed out of 1000 loss: 2.924089193344116
Epoch 135 completed out of 1000 loss: 2.923458993434906
Epoch 136 completed out of 1000 loss: 2.9228692054748535
Epoch 137 completed out of 1000 loss: 2.9222739338874817
Epoch 138 completed out of 1000 loss: 2.9216237664222717
Epoch 139 completed out of 1000 loss: 2.9208778738975525
Epoch 140 completed out of 1000 loss: 2.920034408569336
Epoch 141 completed out of 1000 loss: 2.919112980365753
Epoch 142 completed out of 1000 loss: 2.918119966983795
Epoch 143 completed out of 1000 loss: 2.9171640276908875
Epoch 144 completed out of 1000 loss: 2.9162555932998657
Epoch 145 completed out of 1000 loss: 2.9154030680656433
Epoch 146 completed out of 1000 loss: 2.914657175540924
Epoch 147 completed out of 1000 loss: 2.9139073491096497
Epoch 148 completed out of 1000 loss: 2.913055181503296
Epoch 149 completed out of 1000 loss: 2.912074625492096
Epoch 150 completed out of 1000 loss: 2.911029100418091
Epoch 151 completed out of 1000 loss: 2.9100049138069153
Epoch 152 completed out of 1000 loss: 2.9090283513069153
Epoch 153 completed out of 1000 loss: 2.9080387353897095
Epoch 154 completed out of 1000 loss: 2.906984031200409
Epoch 155 completed out of 1000 loss: 2.9058173298835754
Epoch 156 completed out of 1000 loss: 2.904603064060211
Epoch 157 completed out of 1000 loss: 2.903403639793396
Epoch 158 completed out of 1000 loss: 2.9021794199943542
Epoch 159 completed out of 1000 loss: 2.900878131389618
Epoch 160 completed out of 1000 loss: 2.899436831474304
Epoch 161 completed out of 1000 loss: 2.8978506326675415
Epoch 162 completed out of 1000 loss: 2.896293878555298
Epoch 163 completed out of 1000 loss: 2.8946988582611084
Epoch 164 completed out of 1000 loss: 2.8930030465126038
Epoch 165 completed out of 1000 loss: 2.8915171027183533
Epoch 166 completed out of 1000 loss: 2.890318751335144
Epoch 167 completed out of 1000 loss: 2.8893377780914307
Epoch 168 completed out of 1000 loss: 2.888425350189209
Epoch 169 completed out of 1000 loss: 2.8874850273132324
Epoch 170 completed out of 1000 loss: 2.886618971824646
Epoch 171 completed out of 1000 loss: 2.8857858777046204
Epoch 172 completed out of 1000 loss: 2.884995222091675
Epoch 173 completed out of 1000 loss: 2.884241223335266
Epoch 174 completed out of 1000 loss: 2.8835148215293884
Epoch 175 completed out of 1000 loss: 2.882812976837158
Epoch 176 completed out of 1000 loss: 2.8821158409118652
Epoch 177 completed out of 1000 loss: 2.8814231157302856
Epoch 178 completed out of 1000 loss: 2.880797326564789
Epoch 179 completed out of 1000 loss: 2.8802549242973328
Epoch 180 completed out of 1000 loss: 2.8797659873962402
Epoch 181 completed out of 1000 loss: 2.8793033957481384
Epoch 182 completed out of 1000 loss: 2.878857135772705
Epoch 183 completed out of 1000 loss: 2.8784327507019043
Epoch 184 completed out of 1000 loss: 2.878060042858124
Epoch 185 completed out of 1000 loss: 2.8777190446853638
Epoch 186 completed out of 1000 loss: 2.877383828163147
Epoch 187 completed out of 1000 loss: 2.877058506011963
Epoch 188 completed out of 1000 loss: 2.8767516016960144
Epoch 189 completed out of 1000 loss: 2.876455843448639
Epoch 190 completed out of 1000 loss: 2.876172661781311
Epoch 191 completed out of 1000 loss: 2.875896453857422
Epoch 192 completed out of 1000 loss: 2.8756306767463684
Epoch 193 completed out of 1000 loss: 2.875369369983673
Epoch 194 completed out of 1000 loss: 2.8751181960105896
Epoch 195 completed out of 1000 loss: 2.874875247478485
Epoch 196 completed out of 1000 loss: 2.874636948108673
Epoch 197 completed out of 1000 loss: 2.874407112598419
Epoch 198 completed out of 1000 loss: 2.874181389808655
Epoch 199 completed out of 1000 loss: 2.873959958553314
Epoch 200 completed out of 1000 loss: 2.8737411499023438
Epoch 201 completed out of 1000 loss: 2.873524308204651
Epoch 202 completed out of 1000 loss: 2.873303771018982
Epoch 203 completed out of 1000 loss: 2.8730850219726562
Epoch 204 completed out of 1000 loss: 2.872870087623596
Epoch 205 completed out of 1000 loss: 2.8726609349250793
Epoch 206 completed out of 1000 loss: 2.8724491596221924
Epoch 207 completed out of 1000 loss: 2.872234523296356
Epoch 208 completed out of 1000 loss: 2.8720195293426514
Epoch 209 completed out of 1000 loss: 2.871799409389496
Epoch 210 completed out of 1000 loss: 2.871576726436615
Epoch 211 completed out of 1000 loss: 2.8713455200195312
Epoch 212 completed out of 1000 loss: 2.8711090683937073
Epoch 213 completed out of 1000 loss: 2.870874345302582
Epoch 214 completed out of 1000 loss: 2.870649516582489
Epoch 215 completed out of 1000 loss: 2.87043434381485
Epoch 216 completed out of 1000 loss: 2.870229959487915
Epoch 217 completed out of 1000 loss: 2.87003356218338
Epoch 218 completed out of 1000 loss: 2.8698466420173645
Epoch 219 completed out of 1000 loss: 2.869658410549164
Epoch 220 completed out of 1000 loss: 2.8694745302200317
Epoch 221 completed out of 1000 loss: 2.8692888021469116
Epoch 222 completed out of 1000 loss: 2.8691049814224243
Epoch 223 completed out of 1000 loss: 2.8689197301864624
Epoch 224 completed out of 1000 loss: 2.868736207485199
Epoch 225 completed out of 1000 loss: 2.8685532212257385
Epoch 226 completed out of 1000 loss: 2.868372619152069
Epoch 227 completed out of 1000 loss: 2.8681886792182922
Epoch 228 completed out of 1000 loss: 2.867997646331787
Epoch 229 completed out of 1000 loss: 2.867803692817688
Epoch 230 completed out of 1000 loss: 2.8676064014434814
Epoch 231 completed out of 1000 loss: 2.867412567138672
Epoch 232 completed out of 1000 loss: 2.8672207593917847
Epoch 233 completed out of 1000 loss: 2.8670290112495422
Epoch 234 completed out of 1000 loss: 2.866834282875061
Epoch 235 completed out of 1000 loss: 2.8666393160820007
Epoch 236 completed out of 1000 loss: 2.866445541381836
Epoch 237 completed out of 1000 loss: 2.866252601146698
Epoch 238 completed out of 1000 loss: 2.8660624027252197
Epoch 239 completed out of 1000 loss: 2.8658735156059265
Epoch 240 completed out of 1000 loss: 2.865688741207123
Epoch 241 completed out of 1000 loss: 2.8655070066452026
Epoch 242 completed out of 1000 loss: 2.8653231263160706
Epoch 243 completed out of 1000 loss: 2.8651402592658997
Epoch 244 completed out of 1000 loss: 2.8649574518203735
Epoch 245 completed out of 1000 loss: 2.8647767901420593
Epoch 246 completed out of 1000 loss: 2.8645939230918884
Epoch 247 completed out of 1000 loss: 2.8644099831581116
Epoch 248 completed out of 1000 loss: 2.8642290830612183
Epoch 249 completed out of 1000 loss: 2.8640472292900085
Epoch 250 completed out of 1000 loss: 2.863864302635193
Epoch 251 completed out of 1000 loss: 2.863680899143219
Epoch 252 completed out of 1000 loss: 2.863498091697693
Epoch 253 completed out of 1000 loss: 2.863313913345337
Epoch 254 completed out of 1000 loss: 2.863125741481781
Epoch 255 completed out of 1000 loss: 2.8629417419433594
Epoch 256 completed out of 1000 loss: 2.86275315284729
Epoch 257 completed out of 1000 loss: 2.862565279006958
Epoch 258 completed out of 1000 loss: 2.8623772859573364
Epoch 259 completed out of 1000 loss: 2.86218523979187
Epoch 260 completed out of 1000 loss: 2.861994683742523
Epoch 261 completed out of 1000 loss: 2.8618017435073853
Epoch 262 completed out of 1000 loss: 2.8616063594818115
Epoch 263 completed out of 1000 loss: 2.8614121079444885
Epoch 264 completed out of 1000 loss: 2.861216187477112
Epoch 265 completed out of 1000 loss: 2.861018121242523
Epoch 266 completed out of 1000 loss: 2.8608184456825256
Epoch 267 completed out of 1000 loss: 2.860614836215973
Epoch 268 completed out of 1000 loss: 2.8604137301445007
Epoch 269 completed out of 1000 loss: 2.8602095246315002
Epoch 270 completed out of 1000 loss: 2.8600032329559326
Epoch 271 completed out of 1000 loss: 2.859797775745392
Epoch 272 completed out of 1000 loss: 2.8595855832099915
Epoch 273 completed out of 1000 loss: 2.8593729734420776
Epoch 274 completed out of 1000 loss: 2.859160840511322
Epoch 275 completed out of 1000 loss: 2.8589473962783813
Epoch 276 completed out of 1000 loss: 2.8587284684181213
Epoch 277 completed out of 1000 loss: 2.85850989818573
Epoch 278 completed out of 1000 loss: 2.858289122581482
Epoch 279 completed out of 1000 loss: 2.858065128326416
Epoch 280 completed out of 1000 loss: 2.8578421473503113
Epoch 281 completed out of 1000 loss: 2.8576106429100037
Epoch 282 completed out of 1000 loss: 2.857382893562317
Epoch 283 completed out of 1000 loss: 2.85714989900589
Epoch 284 completed out of 1000 loss: 2.856913983821869
Epoch 285 completed out of 1000 loss: 2.8566770553588867
Epoch 286 completed out of 1000 loss: 2.856434643268585
Epoch 287 completed out of 1000 loss: 2.8561907410621643
Epoch 288 completed out of 1000 loss: 2.855941116809845
Epoch 289 completed out of 1000 loss: 2.855688512325287
Epoch 290 completed out of 1000 loss: 2.855427086353302
Epoch 291 completed out of 1000 loss: 2.8551565408706665
Epoch 292 completed out of 1000 loss: 2.8548790216445923
Epoch 293 completed out of 1000 loss: 2.8545942902565002
Epoch 294 completed out of 1000 loss: 2.8543092608451843
Epoch 295 completed out of 1000 loss: 2.85401850938797
Epoch 296 completed out of 1000 loss: 2.853725790977478
Epoch 297 completed out of 1000 loss: 2.8534279465675354
Epoch 298 completed out of 1000 loss: 2.853127658367157
Epoch 299 completed out of 1000 loss: 2.8528314232826233
Epoch 300 completed out of 1000 loss: 2.8525368571281433
Epoch 301 completed out of 1000 loss: 2.8522507548332214
Epoch 302 completed out of 1000 loss: 2.851964592933655
Epoch 303 completed out of 1000 loss: 2.8516804575920105
Epoch 304 completed out of 1000 loss: 2.851395070552826
Epoch 305 completed out of 1000 loss: 2.851108133792877
Epoch 306 completed out of 1000 loss: 2.850820779800415
Epoch 307 completed out of 1000 loss: 2.850531578063965
Epoch 308 completed out of 1000 loss: 2.850237488746643
Epoch 309 completed out of 1000 loss: 2.8499430418014526
Epoch 310 completed out of 1000 loss: 2.8496459126472473
Epoch 311 completed out of 1000 loss: 2.849345088005066
Epoch 312 completed out of 1000 loss: 2.8490418195724487
Epoch 313 completed out of 1000 loss: 2.8487329483032227
Epoch 314 completed out of 1000 loss: 2.8484224677085876
Epoch 315 completed out of 1000 loss: 2.8481053709983826
Epoch 316 completed out of 1000 loss: 2.847781240940094
Epoch 317 completed out of 1000 loss: 2.8474465012550354
Epoch 318 completed out of 1000 loss: 2.847096025943756
Epoch 319 completed out of 1000 loss: 2.8467366695404053
Epoch 320 completed out of 1000 loss: 2.8463903069496155
Epoch 321 completed out of 1000 loss: 2.8460588455200195
Epoch 322 completed out of 1000 loss: 2.8457422256469727
Epoch 323 completed out of 1000 loss: 2.845425546169281
Epoch 324 completed out of 1000 loss: 2.8451064229011536
Epoch 325 completed out of 1000 loss: 2.844782292842865
Epoch 326 completed out of 1000 loss: 2.8444550037384033
Epoch 327 completed out of 1000 loss: 2.8441218733787537
Epoch 328 completed out of 1000 loss: 2.843783676624298
Epoch 329 completed out of 1000 loss: 2.843439757823944
Epoch 330 completed out of 1000 loss: 2.843087375164032
Epoch 331 completed out of 1000 loss: 2.8427249789237976
Epoch 332 completed out of 1000 loss: 2.842353940010071
Epoch 333 completed out of 1000 loss: 2.8419675827026367
Epoch 334 completed out of 1000 loss: 2.8415660858154297
Epoch 335 completed out of 1000 loss: 2.8411468267440796
Epoch 336 completed out of 1000 loss: 2.8407163619995117
Epoch 337 completed out of 1000 loss: 2.840280532836914
Epoch 338 completed out of 1000 loss: 2.8398303389549255
Epoch 339 completed out of 1000 loss: 2.8393684029579163
Epoch 340 completed out of 1000 loss: 2.838887333869934
Epoch 341 completed out of 1000 loss: 2.8383941054344177
Epoch 342 completed out of 1000 loss: 2.837883949279785
Epoch 343 completed out of 1000 loss: 2.8373570442199707
Epoch 344 completed out of 1000 loss: 2.8368029594421387
Epoch 345 completed out of 1000 loss: 2.836221218109131
Epoch 346 completed out of 1000 loss: 2.8356111645698547
Epoch 347 completed out of 1000 loss: 2.834965169429779
Epoch 348 completed out of 1000 loss: 2.834278702735901
Epoch 349 completed out of 1000 loss: 2.8335493206977844
Epoch 350 completed out of 1000 loss: 2.8327673077583313
Epoch 351 completed out of 1000 loss: 2.8319312930107117
Epoch 352 completed out of 1000 loss: 2.8310316801071167
Epoch 353 completed out of 1000 loss: 2.8300639390945435
Epoch 354 completed out of 1000 loss: 2.8290221095085144
Epoch 355 completed out of 1000 loss: 2.827905833721161
Epoch 356 completed out of 1000 loss: 2.826712191104889
Epoch 357 completed out of 1000 loss: 2.825442373752594
Epoch 358 completed out of 1000 loss: 2.8241052627563477
Epoch 359 completed out of 1000 loss: 2.8227111101150513
Epoch 360 completed out of 1000 loss: 2.821272373199463
Epoch 361 completed out of 1000 loss: 2.8197981119155884
Epoch 362 completed out of 1000 loss: 2.818310260772705
Epoch 363 completed out of 1000 loss: 2.8168211579322815
Epoch 364 completed out of 1000 loss: 2.815342366695404
Epoch 365 completed out of 1000 loss: 2.813888728618622
Epoch 366 completed out of 1000 loss: 2.8124725222587585
Epoch 367 completed out of 1000 loss: 2.811101794242859
Epoch 368 completed out of 1000 loss: 2.809781491756439
Epoch 369 completed out of 1000 loss: 2.8085126876831055
Epoch 370 completed out of 1000 loss: 2.8072909712791443
Epoch 371 completed out of 1000 loss: 2.806110203266144
Epoch 372 completed out of 1000 loss: 2.8049439191818237
Epoch 373 completed out of 1000 loss: 2.8038153052330017
Epoch 374 completed out of 1000 loss: 2.802696704864502
Epoch 375 completed out of 1000 loss: 2.80159854888916
Epoch 376 completed out of 1000 loss: 2.800545334815979
Epoch 377 completed out of 1000 loss: 2.7995141744613647
Epoch 378 completed out of 1000 loss: 2.7985137701034546
Epoch 379 completed out of 1000 loss: 2.797551453113556
Epoch 380 completed out of 1000 loss: 2.7966273427009583
Epoch 381 completed out of 1000 loss: 2.7957370281219482
Epoch 382 completed out of 1000 loss: 2.794884204864502
Epoch 383 completed out of 1000 loss: 2.7940651774406433
Epoch 384 completed out of 1000 loss: 2.793264150619507
Epoch 385 completed out of 1000 loss: 2.7924748063087463
Epoch 386 completed out of 1000 loss: 2.7917274236679077
Epoch 387 completed out of 1000 loss: 2.7910297513008118
Epoch 388 completed out of 1000 loss: 2.7903396487236023
Epoch 389 completed out of 1000 loss: 2.7896814942359924
Epoch 390 completed out of 1000 loss: 2.789027690887451
Epoch 391 completed out of 1000 loss: 2.788383960723877
Epoch 392 completed out of 1000 loss: 2.7877813577651978
Epoch 393 completed out of 1000 loss: 2.7871960401535034
Epoch 394 completed out of 1000 loss: 2.7866013050079346
Epoch 395 completed out of 1000 loss: 2.7860252261161804
Epoch 396 completed out of 1000 loss: 2.785451889038086
Epoch 397 completed out of 1000 loss: 2.7848836183547974
Epoch 398 completed out of 1000 loss: 2.784306228160858
Epoch 399 completed out of 1000 loss: 2.7837342023849487
Epoch 400 completed out of 1000 loss: 2.783153712749481
Epoch 401 completed out of 1000 loss: 2.782566726207733
Epoch 402 completed out of 1000 loss: 2.7819665670394897
Epoch 403 completed out of 1000 loss: 2.781363368034363
Epoch 404 completed out of 1000 loss: 2.7807469367980957
Epoch 405 completed out of 1000 loss: 2.7801210284233093
Epoch 406 completed out of 1000 loss: 2.7794782519340515
Epoch 407 completed out of 1000 loss: 2.7788220643997192
Epoch 408 completed out of 1000 loss: 2.7781514525413513
Epoch 409 completed out of 1000 loss: 2.7774584889411926
Epoch 410 completed out of 1000 loss: 2.7767531871795654
Epoch 411 completed out of 1000 loss: 2.776048243045807
Epoch 412 completed out of 1000 loss: 2.775328278541565
Epoch 413 completed out of 1000 loss: 2.7745959758758545
Epoch 414 completed out of 1000 loss: 2.773854970932007
Epoch 415 completed out of 1000 loss: 2.7731019854545593
Epoch 416 completed out of 1000 loss: 2.7723387479782104
Epoch 417 completed out of 1000 loss: 2.77156662940979
Epoch 418 completed out of 1000 loss: 2.770789861679077
Epoch 419 completed out of 1000 loss: 2.7700015902519226
Epoch 420 completed out of 1000 loss: 2.7692083716392517
Epoch 421 completed out of 1000 loss: 2.7684078216552734
Epoch 422 completed out of 1000 loss: 2.7676044702529907
Epoch 423 completed out of 1000 loss: 2.7667961716651917
Epoch 424 completed out of 1000 loss: 2.7659891843795776
Epoch 425 completed out of 1000 loss: 2.7651840448379517
Epoch 426 completed out of 1000 loss: 2.7643860578536987
Epoch 427 completed out of 1000 loss: 2.763599216938019
Epoch 428 completed out of 1000 loss: 2.7628270983695984
Epoch 429 completed out of 1000 loss: 2.762074887752533
Epoch 430 completed out of 1000 loss: 2.7613478302955627
Epoch 431 completed out of 1000 loss: 2.760641872882843
Epoch 432 completed out of 1000 loss: 2.7599639892578125
Epoch 433 completed out of 1000 loss: 2.7593421936035156
Epoch 434 completed out of 1000 loss: 2.7587499618530273
Epoch 435 completed out of 1000 loss: 2.758191227912903
Epoch 436 completed out of 1000 loss: 2.7576707005500793
Epoch 437 completed out of 1000 loss: 2.757182240486145
Epoch 438 completed out of 1000 loss: 2.756726622581482
Epoch 439 completed out of 1000 loss: 2.7562976479530334
Epoch 440 completed out of 1000 loss: 2.755891740322113
Epoch 441 completed out of 1000 loss: 2.755504071712494
Epoch 442 completed out of 1000 loss: 2.7551376819610596
Epoch 443 completed out of 1000 loss: 2.754786193370819
Epoch 444 completed out of 1000 loss: 2.7544469833374023
Epoch 445 completed out of 1000 loss: 2.7541213035583496
Epoch 446 completed out of 1000 loss: 2.753804922103882
Epoch 447 completed out of 1000 loss: 2.7534968852996826
Epoch 448 completed out of 1000 loss: 2.7531954050064087
Epoch 449 completed out of 1000 loss: 2.7529028058052063
Epoch 450 completed out of 1000 loss: 2.752614915370941
Epoch 451 completed out of 1000 loss: 2.752330183982849
Epoch 452 completed out of 1000 loss: 2.7520519495010376
Epoch 453 completed out of 1000 loss: 2.7517743706703186
Epoch 454 completed out of 1000 loss: 2.7514951825141907
Epoch 455 completed out of 1000 loss: 2.7512179017066956
Epoch 456 completed out of 1000 loss: 2.750941216945648
Epoch 457 completed out of 1000 loss: 2.750659465789795
Epoch 458 completed out of 1000 loss: 2.750379502773285
Epoch 459 completed out of 1000 loss: 2.750098764896393
Epoch 460 completed out of 1000 loss: 2.749817192554474
Epoch 461 completed out of 1000 loss: 2.749530792236328
Epoch 462 completed out of 1000 loss: 2.749235987663269
Epoch 463 completed out of 1000 loss: 2.748936355113983
Epoch 464 completed out of 1000 loss: 2.74862402677536
Epoch 465 completed out of 1000 loss: 2.748301863670349
Epoch 466 completed out of 1000 loss: 2.747969329357147
Epoch 467 completed out of 1000 loss: 2.7476181387901306
Epoch 468 completed out of 1000 loss: 2.7472530603408813
Epoch 469 completed out of 1000 loss: 2.7468703985214233
Epoch 470 completed out of 1000 loss: 2.7464669346809387
Epoch 471 completed out of 1000 loss: 2.7460442781448364
Epoch 472 completed out of 1000 loss: 2.74559885263443
Epoch 473 completed out of 1000 loss: 2.74513041973114
Epoch 474 completed out of 1000 loss: 2.74464350938797
Epoch 475 completed out of 1000 loss: 2.7441383004188538
Epoch 476 completed out of 1000 loss: 2.7436238527297974
Epoch 477 completed out of 1000 loss: 2.74310439825058
Epoch 478 completed out of 1000 loss: 2.7425957918167114
Epoch 479 completed out of 1000 loss: 2.7421082854270935
Epoch 480 completed out of 1000 loss: 2.7416478991508484
Epoch 481 completed out of 1000 loss: 2.741228997707367
Epoch 482 completed out of 1000 loss: 2.740854024887085
Epoch 483 completed out of 1000 loss: 2.7405271530151367
Epoch 484 completed out of 1000 loss: 2.7402423620224
Epoch 485 completed out of 1000 loss: 2.7399940490722656
Epoch 486 completed out of 1000 loss: 2.739773988723755
Epoch 487 completed out of 1000 loss: 2.739573895931244
Epoch 488 completed out of 1000 loss: 2.7393919825553894
Epoch 489 completed out of 1000 loss: 2.7392221689224243
Epoch 490 completed out of 1000 loss: 2.739057183265686
Epoch 491 completed out of 1000 loss: 2.7389016151428223
Epoch 492 completed out of 1000 loss: 2.738747537136078
Epoch 493 completed out of 1000 loss: 2.7385955452919006
Epoch 494 completed out of 1000 loss: 2.738447427749634
Epoch 495 completed out of 1000 loss: 2.738300919532776
Epoch 496 completed out of 1000 loss: 2.738158345222473
Epoch 497 completed out of 1000 loss: 2.7380154132843018
Epoch 498 completed out of 1000 loss: 2.7378751039505005
Epoch 499 completed out of 1000 loss: 2.737736225128174
Epoch 500 completed out of 1000 loss: 2.737598717212677
Epoch 501 completed out of 1000 loss: 2.737464427947998
Epoch 502 completed out of 1000 loss: 2.7373311519622803
Epoch 503 completed out of 1000 loss: 2.737198233604431
Epoch 504 completed out of 1000 loss: 2.7370673418045044
Epoch 505 completed out of 1000 loss: 2.7369351983070374
Epoch 506 completed out of 1000 loss: 2.736805558204651
Epoch 507 completed out of 1000 loss: 2.736678421497345
Epoch 508 completed out of 1000 loss: 2.7365512251853943
Epoch 509 completed out of 1000 loss: 2.7364248037338257
Epoch 510 completed out of 1000 loss: 2.73629891872406
Epoch 511 completed out of 1000 loss: 2.7361737489700317
Epoch 512 completed out of 1000 loss: 2.736050546169281
Epoch 513 completed out of 1000 loss: 2.7359264492988586
Epoch 514 completed out of 1000 loss: 2.7358016967773438
Epoch 515 completed out of 1000 loss: 2.7356820702552795
Epoch 516 completed out of 1000 loss: 2.735559582710266
Epoch 517 completed out of 1000 loss: 2.7354378700256348
Epoch 518 completed out of 1000 loss: 2.7353169322013855
Epoch 519 completed out of 1000 loss: 2.7351943254470825
Epoch 520 completed out of 1000 loss: 2.73507422208786
Epoch 521 completed out of 1000 loss: 2.734955072402954
Epoch 522 completed out of 1000 loss: 2.7348342537879944
Epoch 523 completed out of 1000 loss: 2.7347142100334167
Epoch 524 completed out of 1000 loss: 2.7345961928367615
Epoch 525 completed out of 1000 loss: 2.7344780564308167
Epoch 526 completed out of 1000 loss: 2.7343603372573853
Epoch 527 completed out of 1000 loss: 2.7342432141304016
Epoch 528 completed out of 1000 loss: 2.734127104282379
Epoch 529 completed out of 1000 loss: 2.7340124249458313
Epoch 530 completed out of 1000 loss: 2.733897030353546
Epoch 531 completed out of 1000 loss: 2.733783185482025
Epoch 532 completed out of 1000 loss: 2.733670651912689
Epoch 533 completed out of 1000 loss: 2.733558416366577
Epoch 534 completed out of 1000 loss: 2.73344224691391
Epoch 535 completed out of 1000 loss: 2.7333300709724426
Epoch 536 completed out of 1000 loss: 2.7332153916358948
Epoch 537 completed out of 1000 loss: 2.7331011295318604
Epoch 538 completed out of 1000 loss: 2.7329869866371155
Epoch 539 completed out of 1000 loss: 2.732870399951935
Epoch 540 completed out of 1000 loss: 2.7327533960342407
Epoch 541 completed out of 1000 loss: 2.732636511325836
Epoch 542 completed out of 1000 loss: 2.7325228452682495
Epoch 543 completed out of 1000 loss: 2.7324076294898987
Epoch 544 completed out of 1000 loss: 2.732294797897339
Epoch 545 completed out of 1000 loss: 2.732181668281555
Epoch 546 completed out of 1000 loss: 2.732069671154022
Epoch 547 completed out of 1000 loss: 2.7319589853286743
Epoch 548 completed out of 1000 loss: 2.7318485379219055
Epoch 549 completed out of 1000 loss: 2.731735587120056
Epoch 550 completed out of 1000 loss: 2.731625735759735
Epoch 551 completed out of 1000 loss: 2.73151171207428
Epoch 552 completed out of 1000 loss: 2.7314006090164185
Epoch 553 completed out of 1000 loss: 2.7312896251678467
Epoch 554 completed out of 1000 loss: 2.7311742901802063
Epoch 555 completed out of 1000 loss: 2.7310640811920166
Epoch 556 completed out of 1000 loss: 2.730951249599457
Epoch 557 completed out of 1000 loss: 2.730838894844055
Epoch 558 completed out of 1000 loss: 2.7307254672050476
Epoch 559 completed out of 1000 loss: 2.730611801147461
Epoch 560 completed out of 1000 loss: 2.7304998636245728
Epoch 561 completed out of 1000 loss: 2.7303885221481323
Epoch 562 completed out of 1000 loss: 2.730272352695465
Epoch 563 completed out of 1000 loss: 2.7301588654518127
Epoch 564 completed out of 1000 loss: 2.73004549741745
Epoch 565 completed out of 1000 loss: 2.7299304008483887
Epoch 566 completed out of 1000 loss: 2.729816734790802
Epoch 567 completed out of 1000 loss: 2.7297016978263855
Epoch 568 completed out of 1000 loss: 2.729589819908142
Epoch 569 completed out of 1000 loss: 2.7294735312461853
Epoch 570 completed out of 1000 loss: 2.729357898235321
Epoch 571 completed out of 1000 loss: 2.729243040084839
Epoch 572 completed out of 1000 loss: 2.729126989841461
Epoch 573 completed out of 1000 loss: 2.7290119528770447
Epoch 574 completed out of 1000 loss: 2.7288964986801147
Epoch 575 completed out of 1000 loss: 2.7287814617156982
Epoch 576 completed out of 1000 loss: 2.7286652326583862
Epoch 577 completed out of 1000 loss: 2.7285505533218384
Epoch 578 completed out of 1000 loss: 2.728434443473816
Epoch 579 completed out of 1000 loss: 2.728318691253662
Epoch 580 completed out of 1000 loss: 2.7282023429870605
Epoch 581 completed out of 1000 loss: 2.728085696697235
Epoch 582 completed out of 1000 loss: 2.7279695868492126
Epoch 583 completed out of 1000 loss: 2.727854311466217
Epoch 584 completed out of 1000 loss: 2.7277384996414185
Epoch 585 completed out of 1000 loss: 2.727622389793396
Epoch 586 completed out of 1000 loss: 2.727504253387451
Epoch 587 completed out of 1000 loss: 2.7273879647254944
Epoch 588 completed out of 1000 loss: 2.7272722721099854
Epoch 589 completed out of 1000 loss: 2.7271512150764465
Epoch 590 completed out of 1000 loss: 2.7270334362983704
Epoch 591 completed out of 1000 loss: 2.7269150614738464
Epoch 592 completed out of 1000 loss: 2.7267953157424927
Epoch 593 completed out of 1000 loss: 2.726674258708954
Epoch 594 completed out of 1000 loss: 2.726552724838257
Epoch 595 completed out of 1000 loss: 2.7264307737350464
Epoch 596 completed out of 1000 loss: 2.7263094186782837
Epoch 597 completed out of 1000 loss: 2.7261873483657837
Epoch 598 completed out of 1000 loss: 2.7260645627975464
Epoch 599 completed out of 1000 loss: 2.7259421348571777
Epoch 600 completed out of 1000 loss: 2.7258219718933105
Epoch 601 completed out of 1000 loss: 2.7257004976272583
Epoch 602 completed out of 1000 loss: 2.725578188896179
Epoch 603 completed out of 1000 loss: 2.7254565358161926
Epoch 604 completed out of 1000 loss: 2.7253376841545105
Epoch 605 completed out of 1000 loss: 2.7252161502838135
Epoch 606 completed out of 1000 loss: 2.7250974774360657
Epoch 607 completed out of 1000 loss: 2.7249783277511597
Epoch 608 completed out of 1000 loss: 2.724858283996582
Epoch 609 completed out of 1000 loss: 2.7247408032417297
Epoch 610 completed out of 1000 loss: 2.724622130393982
Epoch 611 completed out of 1000 loss: 2.724503457546234
Epoch 612 completed out of 1000 loss: 2.7243862748146057
Epoch 613 completed out of 1000 loss: 2.724266469478607
Epoch 614 completed out of 1000 loss: 2.724147379398346
Epoch 615 completed out of 1000 loss: 2.7240296006202698
Epoch 616 completed out of 1000 loss: 2.7239108085632324
Epoch 617 completed out of 1000 loss: 2.723791480064392
Epoch 618 completed out of 1000 loss: 2.723671793937683
Epoch 619 completed out of 1000 loss: 2.7235520482063293
Epoch 620 completed out of 1000 loss: 2.72343373298645
Epoch 621 completed out of 1000 loss: 2.7233136892318726
Epoch 622 completed out of 1000 loss: 2.7231913805007935
Epoch 623 completed out of 1000 loss: 2.7230732440948486
Epoch 624 completed out of 1000 loss: 2.722950577735901
Epoch 625 completed out of 1000 loss: 2.7228273153305054
Epoch 626 completed out of 1000 loss: 2.722706437110901
Epoch 627 completed out of 1000 loss: 2.722584843635559
Epoch 628 completed out of 1000 loss: 2.7224631309509277
Epoch 629 completed out of 1000 loss: 2.722338914871216
Epoch 630 completed out of 1000 loss: 2.7222139835357666
Epoch 631 completed out of 1000 loss: 2.722090721130371
Epoch 632 completed out of 1000 loss: 2.7219667434692383
Epoch 633 completed out of 1000 loss: 2.721844792366028
Epoch 634 completed out of 1000 loss: 2.7217180132865906
Epoch 635 completed out of 1000 loss: 2.721594989299774
Epoch 636 completed out of 1000 loss: 2.721470892429352
Epoch 637 completed out of 1000 loss: 2.7213438749313354
Epoch 638 completed out of 1000 loss: 2.721219301223755
Epoch 639 completed out of 1000 loss: 2.7210922837257385
Epoch 640 completed out of 1000 loss: 2.7209652066230774
Epoch 641 completed out of 1000 loss: 2.7208398580551147
Epoch 642 completed out of 1000 loss: 2.7207130193710327
Epoch 643 completed out of 1000 loss: 2.7205843329429626
Epoch 644 completed out of 1000 loss: 2.7204557061195374
Epoch 645 completed out of 1000 loss: 2.720328450202942
Epoch 646 completed out of 1000 loss: 2.7201990485191345
Epoch 647 completed out of 1000 loss: 2.720069706439972
Epoch 648 completed out of 1000 loss: 2.7199400663375854
Epoch 649 completed out of 1000 loss: 2.719811260700226
Epoch 650 completed out of 1000 loss: 2.7196799516677856
Epoch 651 completed out of 1000 loss: 2.7195486426353455
Epoch 652 completed out of 1000 loss: 2.719417929649353
Epoch 653 completed out of 1000 loss: 2.7192861437797546
Epoch 654 completed out of 1000 loss: 2.719154953956604
Epoch 655 completed out of 1000 loss: 2.7190215587615967
Epoch 656 completed out of 1000 loss: 2.718888282775879
Epoch 657 completed out of 1000 loss: 2.7187564969062805
Epoch 658 completed out of 1000 loss: 2.7186237573623657
Epoch 659 completed out of 1000 loss: 2.7184908390045166
Epoch 660 completed out of 1000 loss: 2.7183586955070496
Epoch 661 completed out of 1000 loss: 2.7182239294052124
Epoch 662 completed out of 1000 loss: 2.7180898189544678
Epoch 663 completed out of 1000 loss: 2.7179550528526306
Epoch 664 completed out of 1000 loss: 2.7178229093551636
Epoch 665 completed out of 1000 loss: 2.717687249183655
Epoch 666 completed out of 1000 loss: 2.7175557613372803
Epoch 667 completed out of 1000 loss: 2.717422842979431
Epoch 668 completed out of 1000 loss: 2.717289388179779
Epoch 669 completed out of 1000 loss: 2.7171565294265747
Epoch 670 completed out of 1000 loss: 2.7170244455337524
Epoch 671 completed out of 1000 loss: 2.716892421245575
Epoch 672 completed out of 1000 loss: 2.7167622447013855
Epoch 673 completed out of 1000 loss: 2.716632127761841
Epoch 674 completed out of 1000 loss: 2.716502010822296
Epoch 675 completed out of 1000 loss: 2.7163732051849365
Epoch 676 completed out of 1000 loss: 2.716244637966156
Epoch 677 completed out of 1000 loss: 2.716118335723877
Epoch 678 completed out of 1000 loss: 2.7159929871559143
Epoch 679 completed out of 1000 loss: 2.71586674451828
Epoch 680 completed out of 1000 loss: 2.7157429456710815
Epoch 681 completed out of 1000 loss: 2.7156187295913696
Epoch 682 completed out of 1000 loss: 2.715498149394989
Epoch 683 completed out of 1000 loss: 2.7153783440589905
Epoch 684 completed out of 1000 loss: 2.715260326862335
Epoch 685 completed out of 1000 loss: 2.7151423692703247
Epoch 686 completed out of 1000 loss: 2.7150269150733948
Epoch 687 completed out of 1000 loss: 2.714910566806793
Epoch 688 completed out of 1000 loss: 2.7147971987724304
Epoch 689 completed out of 1000 loss: 2.7146835923194885
Epoch 690 completed out of 1000 loss: 2.714574098587036
Epoch 691 completed out of 1000 loss: 2.7144660353660583
Epoch 692 completed out of 1000 loss: 2.714357316493988
Epoch 693 completed out of 1000 loss: 2.7142492532730103
Epoch 694 completed out of 1000 loss: 2.7141454219818115
Epoch 695 completed out of 1000 loss: 2.714041590690613
Epoch 696 completed out of 1000 loss: 2.7139381766319275
Epoch 697 completed out of 1000 loss: 2.7138372659683228
Epoch 698 completed out of 1000 loss: 2.713735580444336
Epoch 699 completed out of 1000 loss: 2.713638186454773
Epoch 700 completed out of 1000 loss: 2.7135409712791443
Epoch 701 completed out of 1000 loss: 2.7134416103363037
Epoch 702 completed out of 1000 loss: 2.7133466601371765
Epoch 703 completed out of 1000 loss: 2.7132515907287598
Epoch 704 completed out of 1000 loss: 2.7131581902503967
Epoch 705 completed out of 1000 loss: 2.7130630016326904
Epoch 706 completed out of 1000 loss: 2.712972939014435
Epoch 707 completed out of 1000 loss: 2.7128822803497314
Epoch 708 completed out of 1000 loss: 2.71279114484787
Epoch 709 completed out of 1000 loss: 2.712703585624695
Epoch 710 completed out of 1000 loss: 2.7126137614250183
Epoch 711 completed out of 1000 loss: 2.7125247716903687
Epoch 712 completed out of 1000 loss: 2.7124383449554443
Epoch 713 completed out of 1000 loss: 2.7123515605926514
Epoch 714 completed out of 1000 loss: 2.712266683578491
Epoch 715 completed out of 1000 loss: 2.71217942237854
Epoch 716 completed out of 1000 loss: 2.712096393108368
Epoch 717 completed out of 1000 loss: 2.712013065814972
Epoch 718 completed out of 1000 loss: 2.7119293212890625
Epoch 719 completed out of 1000 loss: 2.7118478417396545
Epoch 720 completed out of 1000 loss: 2.711765468120575
Epoch 721 completed out of 1000 loss: 2.7116840481758118
Epoch 722 completed out of 1000 loss: 2.7116015553474426
Epoch 723 completed out of 1000 loss: 2.711520552635193
Epoch 724 completed out of 1000 loss: 2.711439371109009
Epoch 725 completed out of 1000 loss: 2.7113595008850098
Epoch 726 completed out of 1000 loss: 2.7112786173820496
Epoch 727 completed out of 1000 loss: 2.711198329925537
Epoch 728 completed out of 1000 loss: 2.7111184000968933
Epoch 729 completed out of 1000 loss: 2.7110408544540405
Epoch 730 completed out of 1000 loss: 2.710961878299713
Epoch 731 completed out of 1000 loss: 2.710881471633911
Epoch 732 completed out of 1000 loss: 2.71080219745636
Epoch 733 completed out of 1000 loss: 2.710723578929901
Epoch 734 completed out of 1000 loss: 2.71064430475235
Epoch 735 completed out of 1000 loss: 2.710567533969879
Epoch 736 completed out of 1000 loss: 2.7104892134666443
Epoch 737 completed out of 1000 loss: 2.7104124426841736
Epoch 738 completed out of 1000 loss: 2.7103339433670044
Epoch 739 completed out of 1000 loss: 2.7102479338645935
Epoch 740 completed out of 1000 loss: 2.710188567638397
Epoch 741 completed out of 1000 loss: 2.7100961208343506
Epoch 742 completed out of 1000 loss: 2.7100294828414917
Epoch 743 completed out of 1000 loss: 2.7099544405937195
Epoch 744 completed out of 1000 loss: 2.7098811864852905
Epoch 745 completed out of 1000 loss: 2.7098101377487183
Epoch 746 completed out of 1000 loss: 2.7097398042678833
Epoch 747 completed out of 1000 loss: 2.709669589996338
Epoch 748 completed out of 1000 loss: 2.709601044654846
Epoch 749 completed out of 1000 loss: 2.7095313668251038
Epoch 750 completed out of 1000 loss: 2.7094631791114807
Epoch 751 completed out of 1000 loss: 2.709396004676819
Epoch 752 completed out of 1000 loss: 2.7093297839164734
Epoch 753 completed out of 1000 loss: 2.7092654705047607
Epoch 754 completed out of 1000 loss: 2.7092003226280212
Epoch 755 completed out of 1000 loss: 2.7091360688209534
Epoch 756 completed out of 1000 loss: 2.709072709083557
Epoch 757 completed out of 1000 loss: 2.709008038043976
Epoch 758 completed out of 1000 loss: 2.70894718170166
Epoch 759 completed out of 1000 loss: 2.708885967731476
Epoch 760 completed out of 1000 loss: 2.7088258862495422
Epoch 761 completed out of 1000 loss: 2.708765387535095
Epoch 762 completed out of 1000 loss: 2.7087055444717407
Epoch 763 completed out of 1000 loss: 2.708647310733795
Epoch 764 completed out of 1000 loss: 2.7085886001586914
Epoch 765 completed out of 1000 loss: 2.7085326313972473
Epoch 766 completed out of 1000 loss: 2.708475708961487
Epoch 767 completed out of 1000 loss: 2.708419978618622
Epoch 768 completed out of 1000 loss: 2.7083678245544434
Epoch 769 completed out of 1000 loss: 2.70831298828125
Epoch 770 completed out of 1000 loss: 2.7082605361938477
Epoch 771 completed out of 1000 loss: 2.7082087993621826
Epoch 772 completed out of 1000 loss: 2.7081575989723206
Epoch 773 completed out of 1000 loss: 2.70810604095459
Epoch 774 completed out of 1000 loss: 2.708058536052704
Epoch 775 completed out of 1000 loss: 2.708008825778961
Epoch 776 completed out of 1000 loss: 2.7079601287841797
Epoch 777 completed out of 1000 loss: 2.7079103589057922
Epoch 778 completed out of 1000 loss: 2.7078645825386047
Epoch 779 completed out of 1000 loss: 2.70781934261322
Epoch 780 completed out of 1000 loss: 2.7077722549438477
Epoch 781 completed out of 1000 loss: 2.7077264189720154
Epoch 782 completed out of 1000 loss: 2.70768141746521
Epoch 783 completed out of 1000 loss: 2.7076390385627747
Epoch 784 completed out of 1000 loss: 2.7075949907302856
Epoch 785 completed out of 1000 loss: 2.7075499296188354
Epoch 786 completed out of 1000 loss: 2.7075076699256897
Epoch 787 completed out of 1000 loss: 2.7074652910232544
Epoch 788 completed out of 1000 loss: 2.707423686981201
Epoch 789 completed out of 1000 loss: 2.707382082939148
Epoch 790 completed out of 1000 loss: 2.70734041929245
Epoch 791 completed out of 1000 loss: 2.707299292087555
Epoch 792 completed out of 1000 loss: 2.7072589993476868
Epoch 793 completed out of 1000 loss: 2.7072190642356873
Epoch 794 completed out of 1000 loss: 2.70717990398407
Epoch 795 completed out of 1000 loss: 2.7071396112442017
Epoch 796 completed out of 1000 loss: 2.707101821899414
Epoch 797 completed out of 1000 loss: 2.7070632576942444
Epoch 798 completed out of 1000 loss: 2.7070233821868896
Epoch 799 completed out of 1000 loss: 2.706985652446747
Epoch 800 completed out of 1000 loss: 2.7069483399391174
Epoch 801 completed out of 1000 loss: 2.7069111466407776
Epoch 802 completed out of 1000 loss: 2.70687198638916
Epoch 803 completed out of 1000 loss: 2.706835925579071
Epoch 804 completed out of 1000 loss: 2.7067978978157043
Epoch 805 completed out of 1000 loss: 2.7067612409591675
Epoch 806 completed out of 1000 loss: 2.7067272067070007
Epoch 807 completed out of 1000 loss: 2.7066890597343445
Epoch 808 completed out of 1000 loss: 2.706654965877533
Epoch 809 completed out of 1000 loss: 2.706616520881653
Epoch 810 completed out of 1000 loss: 2.706583559513092
Epoch 811 completed out of 1000 loss: 2.7065467834472656
Epoch 812 completed out of 1000 loss: 2.7065123915672302
Epoch 813 completed out of 1000 loss: 2.706477403640747
Epoch 814 completed out of 1000 loss: 2.706442654132843
Epoch 815 completed out of 1000 loss: 2.706406831741333
Epoch 816 completed out of 1000 loss: 2.7063727378845215
Epoch 817 completed out of 1000 loss: 2.706339657306671
Epoch 818 completed out of 1000 loss: 2.7063055634498596
Epoch 819 completed out of 1000 loss: 2.7062697410583496
Epoch 820 completed out of 1000 loss: 2.7062368988990784
Epoch 821 completed out of 1000 loss: 2.7062020897865295
Epoch 822 completed out of 1000 loss: 2.7061703205108643
Epoch 823 completed out of 1000 loss: 2.7061363458633423
Epoch 824 completed out of 1000 loss: 2.706102192401886
Epoch 825 completed out of 1000 loss: 2.706068515777588
Epoch 826 completed out of 1000 loss: 2.7060352563858032
Epoch 827 completed out of 1000 loss: 2.706002175807953
Epoch 828 completed out of 1000 loss: 2.705969214439392
Epoch 829 completed out of 1000 loss: 2.7059342861175537
Epoch 830 completed out of 1000 loss: 2.7059022784233093
Epoch 831 completed out of 1000 loss: 2.7058698534965515
Epoch 832 completed out of 1000 loss: 2.7058369517326355
Epoch 833 completed out of 1000 loss: 2.705805540084839
Epoch 834 completed out of 1000 loss: 2.705772638320923
Epoch 835 completed out of 1000 loss: 2.705739676952362
Epoch 836 completed out of 1000 loss: 2.7057090401649475
Epoch 837 completed out of 1000 loss: 2.705675721168518
Epoch 838 completed out of 1000 loss: 2.705642819404602
Epoch 839 completed out of 1000 loss: 2.705612063407898
Epoch 840 completed out of 1000 loss: 2.7055811285972595
Epoch 841 completed out of 1000 loss: 2.705550253391266
Epoch 842 completed out of 1000 loss: 2.7055192589759827
Epoch 843 completed out of 1000 loss: 2.70548939704895
Epoch 844 completed out of 1000 loss: 2.705458343029022
Epoch 845 completed out of 1000 loss: 2.7054247856140137
Epoch 846 completed out of 1000 loss: 2.7053956985473633
Epoch 847 completed out of 1000 loss: 2.7053651809692383
Epoch 848 completed out of 1000 loss: 2.705333411693573
Epoch 849 completed out of 1000 loss: 2.7053046226501465
Epoch 850 completed out of 1000 loss: 2.705275058746338
Epoch 851 completed out of 1000 loss: 2.7052443623542786
Epoch 852 completed out of 1000 loss: 2.7052133083343506
Epoch 853 completed out of 1000 loss: 2.7051830887794495
Epoch 854 completed out of 1000 loss: 2.7051544189453125
Epoch 855 completed out of 1000 loss: 2.705124795436859
Epoch 856 completed out of 1000 loss: 2.7050943970680237
Epoch 857 completed out of 1000 loss: 2.705064058303833
Epoch 858 completed out of 1000 loss: 2.7050363421440125
Epoch 859 completed out of 1000 loss: 2.7050057649612427
Epoch 860 completed out of 1000 loss: 2.704976201057434
Epoch 861 completed out of 1000 loss: 2.704946279525757
Epoch 862 completed out of 1000 loss: 2.7049182653427124
Epoch 863 completed out of 1000 loss: 2.7048888206481934
Epoch 864 completed out of 1000 loss: 2.70485782623291
Epoch 865 completed out of 1000 loss: 2.7048293948173523
Epoch 866 completed out of 1000 loss: 2.704800069332123
Epoch 867 completed out of 1000 loss: 2.7047706842422485
Epoch 868 completed out of 1000 loss: 2.7047420740127563
Epoch 869 completed out of 1000 loss: 2.704712986946106
Epoch 870 completed out of 1000 loss: 2.704682946205139
Epoch 871 completed out of 1000 loss: 2.704654633998871
Epoch 872 completed out of 1000 loss: 2.7046263813972473
Epoch 873 completed out of 1000 loss: 2.7045966386795044
Epoch 874 completed out of 1000 loss: 2.704567611217499
Epoch 875 completed out of 1000 loss: 2.7045385241508484
Epoch 876 completed out of 1000 loss: 2.7045108675956726
Epoch 877 completed out of 1000 loss: 2.704482316970825
Epoch 878 completed out of 1000 loss: 2.704452872276306
Epoch 879 completed out of 1000 loss: 2.7044243812561035
Epoch 880 completed out of 1000 loss: 2.7043948769569397
Epoch 881 completed out of 1000 loss: 2.704367697238922
Epoch 882 completed out of 1000 loss: 2.704338848590851
Epoch 883 completed out of 1000 loss: 2.7043121457099915
Epoch 884 completed out of 1000 loss: 2.704281508922577
Epoch 885 completed out of 1000 loss: 2.7042534947395325
Epoch 886 completed out of 1000 loss: 2.704223930835724
Epoch 887 completed out of 1000 loss: 2.7041971683502197
Epoch 888 completed out of 1000 loss: 2.7041673064231873
Epoch 889 completed out of 1000 loss: 2.704138398170471
Epoch 890 completed out of 1000 loss: 2.704108953475952
Epoch 891 completed out of 1000 loss: 2.7040809988975525
Epoch 892 completed out of 1000 loss: 2.704051911830902
Epoch 893 completed out of 1000 loss: 2.7040229439735413
Epoch 894 completed out of 1000 loss: 2.7039933800697327
Epoch 895 completed out of 1000 loss: 2.703965127468109
Epoch 896 completed out of 1000 loss: 2.703935205936432
Epoch 897 completed out of 1000 loss: 2.7039068937301636
Epoch 898 completed out of 1000 loss: 2.7038758397102356
Epoch 899 completed out of 1000 loss: 2.703848898410797
Epoch 900 completed out of 1000 loss: 2.703817129135132
Epoch 901 completed out of 1000 loss: 2.703787922859192
Epoch 902 completed out of 1000 loss: 2.7037567496299744
Epoch 903 completed out of 1000 loss: 2.703727126121521
Epoch 904 completed out of 1000 loss: 2.703696608543396
Epoch 905 completed out of 1000 loss: 2.7036682963371277
Epoch 906 completed out of 1000 loss: 2.7036368250846863
Epoch 907 completed out of 1000 loss: 2.703606903553009
Epoch 908 completed out of 1000 loss: 2.7035780549049377
Epoch 909 completed out of 1000 loss: 2.7035497426986694
Epoch 910 completed out of 1000 loss: 2.7035199999809265
Epoch 911 completed out of 1000 loss: 2.7034908533096313
Epoch 912 completed out of 1000 loss: 2.7034634947776794
Epoch 913 completed out of 1000 loss: 2.7034361362457275
Epoch 914 completed out of 1000 loss: 2.7034066319465637
Epoch 915 completed out of 1000 loss: 2.7033786177635193
Epoch 916 completed out of 1000 loss: 2.7033494114875793
Epoch 917 completed out of 1000 loss: 2.703323543071747
Epoch 918 completed out of 1000 loss: 2.703294575214386
Epoch 919 completed out of 1000 loss: 2.7032670974731445
Epoch 920 completed out of 1000 loss: 2.703239142894745
Epoch 921 completed out of 1000 loss: 2.703212559223175
Epoch 922 completed out of 1000 loss: 2.7031858563423157
Epoch 923 completed out of 1000 loss: 2.7031564116477966
Epoch 924 completed out of 1000 loss: 2.703128933906555
Epoch 925 completed out of 1000 loss: 2.703100621700287
Epoch 926 completed out of 1000 loss: 2.703075647354126
Epoch 927 completed out of 1000 loss: 2.7030482292175293
Epoch 928 completed out of 1000 loss: 2.7030221223831177
Epoch 929 completed out of 1000 loss: 2.70299369096756
Epoch 930 completed out of 1000 loss: 2.7029659152030945
Epoch 931 completed out of 1000 loss: 2.7029402256011963
Epoch 932 completed out of 1000 loss: 2.7029110193252563
Epoch 933 completed out of 1000 loss: 2.702885627746582
Epoch 934 completed out of 1000 loss: 2.702856719493866
Epoch 935 completed out of 1000 loss: 2.7028306126594543
Epoch 936 completed out of 1000 loss: 2.7028030157089233
Epoch 937 completed out of 1000 loss: 2.702776074409485
Epoch 938 completed out of 1000 loss: 2.7027474641799927
Epoch 939 completed out of 1000 loss: 2.7027201652526855
Epoch 940 completed out of 1000 loss: 2.702692687511444
Epoch 941 completed out of 1000 loss: 2.7026641368865967
Epoch 942 completed out of 1000 loss: 2.7026347517967224
Epoch 943 completed out of 1000 loss: 2.7026079297065735
Epoch 944 completed out of 1000 loss: 2.702580451965332
Epoch 945 completed out of 1000 loss: 2.7025535106658936
Epoch 946 completed out of 1000 loss: 2.7025263905525208
Epoch 947 completed out of 1000 loss: 2.702498972415924
Epoch 948 completed out of 1000 loss: 2.7024722695350647
Epoch 949 completed out of 1000 loss: 2.702446460723877
Epoch 950 completed out of 1000 loss: 2.7024214267730713
Epoch 951 completed out of 1000 loss: 2.7023951411247253
Epoch 952 completed out of 1000 loss: 2.702368199825287
Epoch 953 completed out of 1000 loss: 2.7023417353630066
Epoch 954 completed out of 1000 loss: 2.7023170590400696
Epoch 955 completed out of 1000 loss: 2.70229035615921
Epoch 956 completed out of 1000 loss: 2.7022625207901
Epoch 957 completed out of 1000 loss: 2.7022367119789124
Epoch 958 completed out of 1000 loss: 2.702211558818817
Epoch 959 completed out of 1000 loss: 2.7021846771240234
Epoch 960 completed out of 1000 loss: 2.7021578550338745
Epoch 961 completed out of 1000 loss: 2.7021321654319763
Epoch 962 completed out of 1000 loss: 2.7021071910858154
Epoch 963 completed out of 1000 loss: 2.7020811438560486
Epoch 964 completed out of 1000 loss: 2.7020550966262817
Epoch 965 completed out of 1000 loss: 2.7020303606987
Epoch 966 completed out of 1000 loss: 2.702004373073578
Epoch 967 completed out of 1000 loss: 2.701980173587799
Epoch 968 completed out of 1000 loss: 2.701956570148468
Epoch 969 completed out of 1000 loss: 2.701929450035095
Epoch 970 completed out of 1000 loss: 2.7019049525260925
Epoch 971 completed out of 1000 loss: 2.701879620552063
Epoch 972 completed out of 1000 loss: 2.7018550038337708
Epoch 973 completed out of 1000 loss: 2.7018309831619263
Epoch 974 completed out of 1000 loss: 2.701805293560028
Epoch 975 completed out of 1000 loss: 2.7017818093299866
Epoch 976 completed out of 1000 loss: 2.7017564177513123
Epoch 977 completed out of 1000 loss: 2.701732635498047
Epoch 978 completed out of 1000 loss: 2.701706647872925
Epoch 979 completed out of 1000 loss: 2.7016834020614624
Epoch 980 completed out of 1000 loss: 2.7016587257385254
Epoch 981 completed out of 1000 loss: 2.701635003089905
Epoch 982 completed out of 1000 loss: 2.7016125321388245
Epoch 983 completed out of 1000 loss: 2.7015848755836487
Epoch 984 completed out of 1000 loss: 2.701561748981476
Epoch 985 completed out of 1000 loss: 2.70153671503067
Epoch 986 completed out of 1000 loss: 2.701513648033142
Epoch 987 completed out of 1000 loss: 2.701489269733429
Epoch 988 completed out of 1000 loss: 2.701464891433716
Epoch 989 completed out of 1000 loss: 2.701441764831543
Epoch 990 completed out of 1000 loss: 2.7014156579971313
Epoch 991 completed out of 1000 loss: 2.7013919949531555
Epoch 992 completed out of 1000 loss: 2.7013667821884155
Epoch 993 completed out of 1000 loss: 2.7013439536094666
Epoch 994 completed out of 1000 loss: 2.7013195157051086
Epoch 995 completed out of 1000 loss: 2.7012946009635925
Epoch 996 completed out of 1000 loss: 2.701272189617157
Epoch 997 completed out of 1000 loss: 2.7012466192245483
Epoch 998 completed out of 1000 loss: 2.701223850250244
Epoch 999 completed out of 1000 loss: 2.7011985182762146
Tensor("Softmax_3:0", shape=(?, 2), dtype=float32)
[[0.60608065 0.39391938]
 [0.3265769  0.6734231 ]
 [0.25258026 0.7474198 ]
 ...
 [0.74563813 0.2543619 ]
 [0.80682033 0.19317965]
 [0.69773877 0.30226126]]
False
accuracy on train: 0.5752992
Accuracy on test : 0.5625
[[1. 0.]
 [1. 0.]
 [0. 1.]
 ...
 [0. 1.]
 [0. 1.]
 [0. 1.]]
In [121]:
cm8 = confusion_matrix(df_yd,predy4)
plt.figure(figsize = (10,7))
cm8df = pd.DataFrame(cm8, index = ["1",'0'], columns = ["1",'0'])
seaborn.set(font_scale=1.4)#for label size
seaborn.heatmap(cm8df, annot=True,annot_kws={"size": 16})# font size
Out[121]:
<matplotlib.axes._subplots.AxesSubplot at 0x20f0c520ac8>
In [122]:
fpr, tpr, _ = roc_curve(predy4, df_yd, drop_intermediate=False)
plt.plot(fpr, tpr, color='red')
plt.xlabel('fpr')
plt.ylabel('tpr')
plt.title('ROC curve fr Tensorflow 4')
plt.show()
In [123]:
losslist5,predy5 = train_neural_network2(x, dfabcd, df_yabcdten, dfe, df_yeten)
predy5 = np.argmax(predy5,axis=1)
Epoch 0 completed out of 1000 loss: 3.3060057759284973
Epoch 1 completed out of 1000 loss: 3.303226888179779
Epoch 2 completed out of 1000 loss: 3.3007110357284546
Epoch 3 completed out of 1000 loss: 3.2984734773635864
Epoch 4 completed out of 1000 loss: 3.296467900276184
Epoch 5 completed out of 1000 loss: 3.2946465015411377
Epoch 6 completed out of 1000 loss: 3.2929041385650635
Epoch 7 completed out of 1000 loss: 3.2912084460258484
Epoch 8 completed out of 1000 loss: 3.28957736492157
Epoch 9 completed out of 1000 loss: 3.2879775166511536
Epoch 10 completed out of 1000 loss: 3.2863571643829346
Epoch 11 completed out of 1000 loss: 3.284720301628113
Epoch 12 completed out of 1000 loss: 3.283039629459381
Epoch 13 completed out of 1000 loss: 3.2812997698783875
Epoch 14 completed out of 1000 loss: 3.2795165181159973
Epoch 15 completed out of 1000 loss: 3.277707040309906
Epoch 16 completed out of 1000 loss: 3.2758708596229553
Epoch 17 completed out of 1000 loss: 3.273929178714752
Epoch 18 completed out of 1000 loss: 3.2718665599823
Epoch 19 completed out of 1000 loss: 3.2696735858917236
Epoch 20 completed out of 1000 loss: 3.2673205137252808
Epoch 21 completed out of 1000 loss: 3.2647277116775513
Epoch 22 completed out of 1000 loss: 3.261822462081909
Epoch 23 completed out of 1000 loss: 3.258534789085388
Epoch 24 completed out of 1000 loss: 3.254734516143799
Epoch 25 completed out of 1000 loss: 3.2502986192703247
Epoch 26 completed out of 1000 loss: 3.2450186610221863
Epoch 27 completed out of 1000 loss: 3.2386258244514465
Epoch 28 completed out of 1000 loss: 3.230809211730957
Epoch 29 completed out of 1000 loss: 3.2217008471488953
Epoch 30 completed out of 1000 loss: 3.2113569378852844
Epoch 31 completed out of 1000 loss: 3.1998665928840637
Epoch 32 completed out of 1000 loss: 3.1877553462982178
Epoch 33 completed out of 1000 loss: 3.176127851009369
Epoch 34 completed out of 1000 loss: 3.165799558162689
Epoch 35 completed out of 1000 loss: 3.157232165336609
Epoch 36 completed out of 1000 loss: 3.1507761478424072
Epoch 37 completed out of 1000 loss: 3.14630264043808
Epoch 38 completed out of 1000 loss: 3.1425148844718933
Epoch 39 completed out of 1000 loss: 3.1393091082572937
Epoch 40 completed out of 1000 loss: 3.1363402605056763
Epoch 41 completed out of 1000 loss: 3.1332375407218933
Epoch 42 completed out of 1000 loss: 3.1303059458732605
Epoch 43 completed out of 1000 loss: 3.127668857574463
Epoch 44 completed out of 1000 loss: 3.125112295150757
Epoch 45 completed out of 1000 loss: 3.1227787733078003
Epoch 46 completed out of 1000 loss: 3.120609760284424
Epoch 47 completed out of 1000 loss: 3.1185795664787292
Epoch 48 completed out of 1000 loss: 3.1165562868118286
Epoch 49 completed out of 1000 loss: 3.1143950819969177
Epoch 50 completed out of 1000 loss: 3.112329840660095
Epoch 51 completed out of 1000 loss: 3.110356628894806
Epoch 52 completed out of 1000 loss: 3.1084612011909485
Epoch 53 completed out of 1000 loss: 3.1065973043441772
Epoch 54 completed out of 1000 loss: 3.104727625846863
Epoch 55 completed out of 1000 loss: 3.1029399633407593
Epoch 56 completed out of 1000 loss: 3.1012904047966003
Epoch 57 completed out of 1000 loss: 3.099664330482483
Epoch 58 completed out of 1000 loss: 3.09809273481369
Epoch 59 completed out of 1000 loss: 3.0966184735298157
Epoch 60 completed out of 1000 loss: 3.0951889753341675
Epoch 61 completed out of 1000 loss: 3.0938183069229126
Epoch 62 completed out of 1000 loss: 3.0925709009170532
Epoch 63 completed out of 1000 loss: 3.0913882851600647
Epoch 64 completed out of 1000 loss: 3.090209126472473
Epoch 65 completed out of 1000 loss: 3.0890918970108032
Epoch 66 completed out of 1000 loss: 3.088005483150482
Epoch 67 completed out of 1000 loss: 3.0867181420326233
Epoch 68 completed out of 1000 loss: 3.085155665874481
Epoch 69 completed out of 1000 loss: 3.083563983440399
Epoch 70 completed out of 1000 loss: 3.082196056842804
Epoch 71 completed out of 1000 loss: 3.0812880396842957
Epoch 72 completed out of 1000 loss: 3.080368936061859
Epoch 73 completed out of 1000 loss: 3.079400420188904
Epoch 74 completed out of 1000 loss: 3.078543245792389
Epoch 75 completed out of 1000 loss: 3.0775551795959473
Epoch 76 completed out of 1000 loss: 3.076660931110382
Epoch 77 completed out of 1000 loss: 3.07595556974411
Epoch 78 completed out of 1000 loss: 3.075356662273407
Epoch 79 completed out of 1000 loss: 3.074774146080017
Epoch 80 completed out of 1000 loss: 3.074198007583618
Epoch 81 completed out of 1000 loss: 3.073625385761261
Epoch 82 completed out of 1000 loss: 3.073008894920349
Epoch 83 completed out of 1000 loss: 3.0722862482070923
Epoch 84 completed out of 1000 loss: 3.071673810482025
Epoch 85 completed out of 1000 loss: 3.0711217522621155
Epoch 86 completed out of 1000 loss: 3.070560395717621
Epoch 87 completed out of 1000 loss: 3.069896697998047
Epoch 88 completed out of 1000 loss: 3.0692819952964783
Epoch 89 completed out of 1000 loss: 3.068695902824402
Epoch 90 completed out of 1000 loss: 3.0681172013282776
Epoch 91 completed out of 1000 loss: 3.067403197288513
Epoch 92 completed out of 1000 loss: 3.066647529602051
Epoch 93 completed out of 1000 loss: 3.065980553627014
Epoch 94 completed out of 1000 loss: 3.0652841925621033
Epoch 95 completed out of 1000 loss: 3.0644525289535522
Epoch 96 completed out of 1000 loss: 3.063753366470337
Epoch 97 completed out of 1000 loss: 3.0630788803100586
Epoch 98 completed out of 1000 loss: 3.062500238418579
Epoch 99 completed out of 1000 loss: 3.0620601177215576
Epoch 100 completed out of 1000 loss: 3.061627984046936
Epoch 101 completed out of 1000 loss: 3.06117445230484
Epoch 102 completed out of 1000 loss: 3.0608004927635193
Epoch 103 completed out of 1000 loss: 3.060386598110199
Epoch 104 completed out of 1000 loss: 3.059895873069763
Epoch 105 completed out of 1000 loss: 3.059428870677948
Epoch 106 completed out of 1000 loss: 3.059010863304138
Epoch 107 completed out of 1000 loss: 3.0585827827453613
Epoch 108 completed out of 1000 loss: 3.058181583881378
Epoch 109 completed out of 1000 loss: 3.0577253103256226
Epoch 110 completed out of 1000 loss: 3.0572339296340942
Epoch 111 completed out of 1000 loss: 3.05685293674469
Epoch 112 completed out of 1000 loss: 3.0565191507339478
Epoch 113 completed out of 1000 loss: 3.0561869740486145
Epoch 114 completed out of 1000 loss: 3.055859386920929
Epoch 115 completed out of 1000 loss: 3.055571496486664
Epoch 116 completed out of 1000 loss: 3.0552788376808167
Epoch 117 completed out of 1000 loss: 3.0549848675727844
Epoch 118 completed out of 1000 loss: 3.054671585559845
Epoch 119 completed out of 1000 loss: 3.054349660873413
Epoch 120 completed out of 1000 loss: 3.0540329813957214
Epoch 121 completed out of 1000 loss: 3.0537149906158447
Epoch 122 completed out of 1000 loss: 3.0533937215805054
Epoch 123 completed out of 1000 loss: 3.052985370159149
Epoch 124 completed out of 1000 loss: 3.0525142550468445
Epoch 125 completed out of 1000 loss: 3.052103281021118
Epoch 126 completed out of 1000 loss: 3.0517600774765015
Epoch 127 completed out of 1000 loss: 3.0514280200004578
Epoch 128 completed out of 1000 loss: 3.0509522557258606
Epoch 129 completed out of 1000 loss: 3.0504215955734253
Epoch 130 completed out of 1000 loss: 3.0500091910362244
Epoch 131 completed out of 1000 loss: 3.0497254729270935
Epoch 132 completed out of 1000 loss: 3.049444019794464
Epoch 133 completed out of 1000 loss: 3.049093186855316
Epoch 134 completed out of 1000 loss: 3.048729717731476
Epoch 135 completed out of 1000 loss: 3.048433482646942
Epoch 136 completed out of 1000 loss: 3.0481775999069214
Epoch 137 completed out of 1000 loss: 3.0479289889335632
Epoch 138 completed out of 1000 loss: 3.047689974308014
Epoch 139 completed out of 1000 loss: 3.0474244952201843
Epoch 140 completed out of 1000 loss: 3.0471039414405823
Epoch 141 completed out of 1000 loss: 3.0468598008155823
Epoch 142 completed out of 1000 loss: 3.046668827533722
Epoch 143 completed out of 1000 loss: 3.0464730262756348
Epoch 144 completed out of 1000 loss: 3.0462488532066345
Epoch 145 completed out of 1000 loss: 3.0460293889045715
Epoch 146 completed out of 1000 loss: 3.0458196997642517
Epoch 147 completed out of 1000 loss: 3.045607626438141
Epoch 148 completed out of 1000 loss: 3.045408844947815
Epoch 149 completed out of 1000 loss: 3.0452122688293457
Epoch 150 completed out of 1000 loss: 3.045008957386017
Epoch 151 completed out of 1000 loss: 3.0448077917099
Epoch 152 completed out of 1000 loss: 3.044609308242798
Epoch 153 completed out of 1000 loss: 3.0444122552871704
Epoch 154 completed out of 1000 loss: 3.044201076030731
Epoch 155 completed out of 1000 loss: 3.0439983010292053
Epoch 156 completed out of 1000 loss: 3.0437856912612915
Epoch 157 completed out of 1000 loss: 3.0435768365859985
Epoch 158 completed out of 1000 loss: 3.0433590412139893
Epoch 159 completed out of 1000 loss: 3.0431379079818726
Epoch 160 completed out of 1000 loss: 3.042909622192383
Epoch 161 completed out of 1000 loss: 3.04267817735672
Epoch 162 completed out of 1000 loss: 3.0424320697784424
Epoch 163 completed out of 1000 loss: 3.042177200317383
Epoch 164 completed out of 1000 loss: 3.0418747663497925
Epoch 165 completed out of 1000 loss: 3.041516602039337
Epoch 166 completed out of 1000 loss: 3.0412296652793884
Epoch 167 completed out of 1000 loss: 3.040968656539917
Epoch 168 completed out of 1000 loss: 3.040710210800171
Epoch 169 completed out of 1000 loss: 3.0404430627822876
Epoch 170 completed out of 1000 loss: 3.0401805639266968
Epoch 171 completed out of 1000 loss: 3.039915919303894
Epoch 172 completed out of 1000 loss: 3.039655566215515
Epoch 173 completed out of 1000 loss: 3.0393912196159363
Epoch 174 completed out of 1000 loss: 3.039131760597229
Epoch 175 completed out of 1000 loss: 3.038876950740814
Epoch 176 completed out of 1000 loss: 3.038611352443695
Epoch 177 completed out of 1000 loss: 3.0383525490760803
Epoch 178 completed out of 1000 loss: 3.0380902886390686
Epoch 179 completed out of 1000 loss: 3.037830173969269
Epoch 180 completed out of 1000 loss: 3.037570357322693
Epoch 181 completed out of 1000 loss: 3.037308394908905
Epoch 182 completed out of 1000 loss: 3.0370453000068665
Epoch 183 completed out of 1000 loss: 3.0367788672447205
Epoch 184 completed out of 1000 loss: 3.0365172028541565
Epoch 185 completed out of 1000 loss: 3.036245048046112
Epoch 186 completed out of 1000 loss: 3.0359777212142944
Epoch 187 completed out of 1000 loss: 3.0357008576393127
Epoch 188 completed out of 1000 loss: 3.0354244709014893
Epoch 189 completed out of 1000 loss: 3.0351377725601196
Epoch 190 completed out of 1000 loss: 3.034839689731598
Epoch 191 completed out of 1000 loss: 3.03453528881073
Epoch 192 completed out of 1000 loss: 3.034219443798065
Epoch 193 completed out of 1000 loss: 3.033908426761627
Epoch 194 completed out of 1000 loss: 3.0335910320281982
Epoch 195 completed out of 1000 loss: 3.0332690477371216
Epoch 196 completed out of 1000 loss: 3.032938599586487
Epoch 197 completed out of 1000 loss: 3.032601535320282
Epoch 198 completed out of 1000 loss: 3.0322605967521667
Epoch 199 completed out of 1000 loss: 3.0319080352783203
Epoch 200 completed out of 1000 loss: 3.0315456986427307
Epoch 201 completed out of 1000 loss: 3.0311757922172546
Epoch 202 completed out of 1000 loss: 3.030792713165283
Epoch 203 completed out of 1000 loss: 3.0303980708122253
Epoch 204 completed out of 1000 loss: 3.029992699623108
Epoch 205 completed out of 1000 loss: 3.0295687317848206
Epoch 206 completed out of 1000 loss: 3.0291202068328857
Epoch 207 completed out of 1000 loss: 3.0286648869514465
Epoch 208 completed out of 1000 loss: 3.0282185077667236
Epoch 209 completed out of 1000 loss: 3.027775764465332
Epoch 210 completed out of 1000 loss: 3.027322471141815
Epoch 211 completed out of 1000 loss: 3.026862621307373
Epoch 212 completed out of 1000 loss: 3.0263947248458862
Epoch 213 completed out of 1000 loss: 3.0259239077568054
Epoch 214 completed out of 1000 loss: 3.025466203689575
Epoch 215 completed out of 1000 loss: 3.025010585784912
Epoch 216 completed out of 1000 loss: 3.0245611667633057
Epoch 217 completed out of 1000 loss: 3.0241116285324097
Epoch 218 completed out of 1000 loss: 3.0236682295799255
Epoch 219 completed out of 1000 loss: 3.023217499256134
Epoch 220 completed out of 1000 loss: 3.022723138332367
Epoch 221 completed out of 1000 loss: 3.0223315358161926
Epoch 222 completed out of 1000 loss: 3.021944761276245
Epoch 223 completed out of 1000 loss: 3.0215622782707214
Epoch 224 completed out of 1000 loss: 3.0211966037750244
Epoch 225 completed out of 1000 loss: 3.020848512649536
Epoch 226 completed out of 1000 loss: 3.0205137133598328
Epoch 227 completed out of 1000 loss: 3.02019065618515
Epoch 228 completed out of 1000 loss: 3.0198771357536316
Epoch 229 completed out of 1000 loss: 3.019577205181122
Epoch 230 completed out of 1000 loss: 3.0192869305610657
Epoch 231 completed out of 1000 loss: 3.019011616706848
Epoch 232 completed out of 1000 loss: 3.0187425017356873
Epoch 233 completed out of 1000 loss: 3.018488109111786
Epoch 234 completed out of 1000 loss: 3.018241763114929
Epoch 235 completed out of 1000 loss: 3.0180028676986694
Epoch 236 completed out of 1000 loss: 3.017768442630768
Epoch 237 completed out of 1000 loss: 3.0175384879112244
Epoch 238 completed out of 1000 loss: 3.0173213481903076
Epoch 239 completed out of 1000 loss: 3.01710307598114
Epoch 240 completed out of 1000 loss: 3.016888201236725
Epoch 241 completed out of 1000 loss: 3.0166727900505066
Epoch 242 completed out of 1000 loss: 3.016459345817566
Epoch 243 completed out of 1000 loss: 3.0162420868873596
Epoch 244 completed out of 1000 loss: 3.0160366892814636
Epoch 245 completed out of 1000 loss: 3.0158276557922363
Epoch 246 completed out of 1000 loss: 3.0156152844429016
Epoch 247 completed out of 1000 loss: 3.0154099464416504
Epoch 248 completed out of 1000 loss: 3.0152061581611633
Epoch 249 completed out of 1000 loss: 3.015003740787506
Epoch 250 completed out of 1000 loss: 3.01479572057724
Epoch 251 completed out of 1000 loss: 3.0145865082740784
Epoch 252 completed out of 1000 loss: 3.0143765211105347
Epoch 253 completed out of 1000 loss: 3.0141605138778687
Epoch 254 completed out of 1000 loss: 3.013945758342743
Epoch 255 completed out of 1000 loss: 3.0137272477149963
Epoch 256 completed out of 1000 loss: 3.013505220413208
Epoch 257 completed out of 1000 loss: 3.0132761001586914
Epoch 258 completed out of 1000 loss: 3.0130483508110046
Epoch 259 completed out of 1000 loss: 3.0128089785575867
Epoch 260 completed out of 1000 loss: 3.012569487094879
Epoch 261 completed out of 1000 loss: 3.012315034866333
Epoch 262 completed out of 1000 loss: 3.0120584964752197
Epoch 263 completed out of 1000 loss: 3.0117883682250977
Epoch 264 completed out of 1000 loss: 3.01150643825531
Epoch 265 completed out of 1000 loss: 3.01121187210083
Epoch 266 completed out of 1000 loss: 3.0109039545059204
Epoch 267 completed out of 1000 loss: 3.010583519935608
Epoch 268 completed out of 1000 loss: 3.0102459192276
Epoch 269 completed out of 1000 loss: 3.009902238845825
Epoch 270 completed out of 1000 loss: 3.009536921977997
Epoch 271 completed out of 1000 loss: 3.0091534852981567
Epoch 272 completed out of 1000 loss: 3.0087506771087646
Epoch 273 completed out of 1000 loss: 3.008317291736603
Epoch 274 completed out of 1000 loss: 3.0078574419021606
Epoch 275 completed out of 1000 loss: 3.007354497909546
Epoch 276 completed out of 1000 loss: 3.006815195083618
Epoch 277 completed out of 1000 loss: 3.006230592727661
Epoch 278 completed out of 1000 loss: 3.0056263208389282
Epoch 279 completed out of 1000 loss: 3.0049859285354614
Epoch 280 completed out of 1000 loss: 3.004310429096222
Epoch 281 completed out of 1000 loss: 3.0035977363586426
Epoch 282 completed out of 1000 loss: 3.002861738204956
Epoch 283 completed out of 1000 loss: 3.002110242843628
Epoch 284 completed out of 1000 loss: 3.0013479590415955
Epoch 285 completed out of 1000 loss: 3.000603139400482
Epoch 286 completed out of 1000 loss: 2.9998850226402283
Epoch 287 completed out of 1000 loss: 2.9992056488990784
Epoch 288 completed out of 1000 loss: 2.9985691905021667
Epoch 289 completed out of 1000 loss: 2.9979820251464844
Epoch 290 completed out of 1000 loss: 2.997442603111267
Epoch 291 completed out of 1000 loss: 2.996947407722473
Epoch 292 completed out of 1000 loss: 2.9964939951896667
Epoch 293 completed out of 1000 loss: 2.9960712790489197
Epoch 294 completed out of 1000 loss: 2.9956740736961365
Epoch 295 completed out of 1000 loss: 2.995292901992798
Epoch 296 completed out of 1000 loss: 2.994922697544098
Epoch 297 completed out of 1000 loss: 2.994540214538574
Epoch 298 completed out of 1000 loss: 2.9941580295562744
Epoch 299 completed out of 1000 loss: 2.993791401386261
Epoch 300 completed out of 1000 loss: 2.9933886528015137
Epoch 301 completed out of 1000 loss: 2.9929926991462708
Epoch 302 completed out of 1000 loss: 2.9926276803016663
Epoch 303 completed out of 1000 loss: 2.9923203587532043
Epoch 304 completed out of 1000 loss: 2.9919946789741516
Epoch 305 completed out of 1000 loss: 2.991676390171051
Epoch 306 completed out of 1000 loss: 2.991368889808655
Epoch 307 completed out of 1000 loss: 2.9910582304000854
Epoch 308 completed out of 1000 loss: 2.990743577480316
Epoch 309 completed out of 1000 loss: 2.990421235561371
Epoch 310 completed out of 1000 loss: 2.9900986552238464
Epoch 311 completed out of 1000 loss: 2.9897704124450684
Epoch 312 completed out of 1000 loss: 2.989421844482422
Epoch 313 completed out of 1000 loss: 2.989058256149292
Epoch 314 completed out of 1000 loss: 2.988704741001129
Epoch 315 completed out of 1000 loss: 2.9883808493614197
Epoch 316 completed out of 1000 loss: 2.988061010837555
Epoch 317 completed out of 1000 loss: 2.9877371788024902
Epoch 318 completed out of 1000 loss: 2.9874115586280823
Epoch 319 completed out of 1000 loss: 2.987084746360779
Epoch 320 completed out of 1000 loss: 2.9867560267448425
Epoch 321 completed out of 1000 loss: 2.986425817012787
Epoch 322 completed out of 1000 loss: 2.986106216907501
Epoch 323 completed out of 1000 loss: 2.9857800006866455
Epoch 324 completed out of 1000 loss: 2.9854546189308167
Epoch 325 completed out of 1000 loss: 2.985137403011322
Epoch 326 completed out of 1000 loss: 2.9848194122314453
Epoch 327 completed out of 1000 loss: 2.98450368642807
Epoch 328 completed out of 1000 loss: 2.9841926097869873
Epoch 329 completed out of 1000 loss: 2.9838886857032776
Epoch 330 completed out of 1000 loss: 2.983585238456726
Epoch 331 completed out of 1000 loss: 2.9832934737205505
Epoch 332 completed out of 1000 loss: 2.9830029606819153
Epoch 333 completed out of 1000 loss: 2.9827210903167725
Epoch 334 completed out of 1000 loss: 2.9824430346488953
Epoch 335 completed out of 1000 loss: 2.9821709990501404
Epoch 336 completed out of 1000 loss: 2.981904983520508
Epoch 337 completed out of 1000 loss: 2.981641709804535
Epoch 338 completed out of 1000 loss: 2.9813833832740784
Epoch 339 completed out of 1000 loss: 2.9811240434646606
Epoch 340 completed out of 1000 loss: 2.980878710746765
Epoch 341 completed out of 1000 loss: 2.9806312322616577
Epoch 342 completed out of 1000 loss: 2.9803858399391174
Epoch 343 completed out of 1000 loss: 2.980143129825592
Epoch 344 completed out of 1000 loss: 2.9799010157585144
Epoch 345 completed out of 1000 loss: 2.9796628952026367
Epoch 346 completed out of 1000 loss: 2.9794188141822815
Epoch 347 completed out of 1000 loss: 2.9791698455810547
Epoch 348 completed out of 1000 loss: 2.9789271354675293
Epoch 349 completed out of 1000 loss: 2.9786805510520935
Epoch 350 completed out of 1000 loss: 2.978423774242401
Epoch 351 completed out of 1000 loss: 2.978164494037628
Epoch 352 completed out of 1000 loss: 2.9778982400894165
Epoch 353 completed out of 1000 loss: 2.977622330188751
Epoch 354 completed out of 1000 loss: 2.9773406982421875
Epoch 355 completed out of 1000 loss: 2.977036714553833
Epoch 356 completed out of 1000 loss: 2.9767252802848816
Epoch 357 completed out of 1000 loss: 2.976385533809662
Epoch 358 completed out of 1000 loss: 2.9760560393333435
Epoch 359 completed out of 1000 loss: 2.9757248759269714
Epoch 360 completed out of 1000 loss: 2.975385546684265
Epoch 361 completed out of 1000 loss: 2.9750384092330933
Epoch 362 completed out of 1000 loss: 2.9746749997138977
Epoch 363 completed out of 1000 loss: 2.974294602870941
Epoch 364 completed out of 1000 loss: 2.9739051461219788
Epoch 365 completed out of 1000 loss: 2.9735066294670105
Epoch 366 completed out of 1000 loss: 2.973101794719696
Epoch 367 completed out of 1000 loss: 2.972689390182495
Epoch 368 completed out of 1000 loss: 2.972286105155945
Epoch 369 completed out of 1000 loss: 2.971883773803711
Epoch 370 completed out of 1000 loss: 2.9714896082878113
Epoch 371 completed out of 1000 loss: 2.971110224723816
Epoch 372 completed out of 1000 loss: 2.9707430601119995
Epoch 373 completed out of 1000 loss: 2.97039133310318
Epoch 374 completed out of 1000 loss: 2.9700576663017273
Epoch 375 completed out of 1000 loss: 2.969731569290161
Epoch 376 completed out of 1000 loss: 2.969413697719574
Epoch 377 completed out of 1000 loss: 2.9691134691238403
Epoch 378 completed out of 1000 loss: 2.968805253505707
Epoch 379 completed out of 1000 loss: 2.9685081243515015
Epoch 380 completed out of 1000 loss: 2.9682032465934753
Epoch 381 completed out of 1000 loss: 2.96789813041687
Epoch 382 completed out of 1000 loss: 2.9675830006599426
Epoch 383 completed out of 1000 loss: 2.9672667384147644
Epoch 384 completed out of 1000 loss: 2.9669349789619446
Epoch 385 completed out of 1000 loss: 2.9665924310684204
Epoch 386 completed out of 1000 loss: 2.966238021850586
Epoch 387 completed out of 1000 loss: 2.9658621549606323
Epoch 388 completed out of 1000 loss: 2.965461492538452
Epoch 389 completed out of 1000 loss: 2.965041935443878
Epoch 390 completed out of 1000 loss: 2.964589476585388
Epoch 391 completed out of 1000 loss: 2.9641095995903015
Epoch 392 completed out of 1000 loss: 2.9635888934135437
Epoch 393 completed out of 1000 loss: 2.9630300402641296
Epoch 394 completed out of 1000 loss: 2.9624181985855103
Epoch 395 completed out of 1000 loss: 2.961751163005829
Epoch 396 completed out of 1000 loss: 2.9610267281532288
Epoch 397 completed out of 1000 loss: 2.9602293372154236
Epoch 398 completed out of 1000 loss: 2.959357500076294
Epoch 399 completed out of 1000 loss: 2.95840322971344
Epoch 400 completed out of 1000 loss: 2.95734840631485
Epoch 401 completed out of 1000 loss: 2.95619934797287
Epoch 402 completed out of 1000 loss: 2.954936683177948
Epoch 403 completed out of 1000 loss: 2.953549087047577
Epoch 404 completed out of 1000 loss: 2.9520444869995117
Epoch 405 completed out of 1000 loss: 2.9503848552703857
Epoch 406 completed out of 1000 loss: 2.9485087394714355
Epoch 407 completed out of 1000 loss: 2.9463738203048706
Epoch 408 completed out of 1000 loss: 2.9438893795013428
Epoch 409 completed out of 1000 loss: 2.9409070014953613
Epoch 410 completed out of 1000 loss: 2.9372490644454956
Epoch 411 completed out of 1000 loss: 2.932693839073181
Epoch 412 completed out of 1000 loss: 2.927065432071686
Epoch 413 completed out of 1000 loss: 2.920245349407196
Epoch 414 completed out of 1000 loss: 2.9125365614891052
Epoch 415 completed out of 1000 loss: 2.9043524861335754
Epoch 416 completed out of 1000 loss: 2.896538496017456
Epoch 417 completed out of 1000 loss: 2.8899139761924744
Epoch 418 completed out of 1000 loss: 2.88492614030838
Epoch 419 completed out of 1000 loss: 2.8815905451774597
Epoch 420 completed out of 1000 loss: 2.879429817199707
Epoch 421 completed out of 1000 loss: 2.877912700176239
Epoch 422 completed out of 1000 loss: 2.8767136335372925
Epoch 423 completed out of 1000 loss: 2.875710964202881
Epoch 424 completed out of 1000 loss: 2.8748990893363953
Epoch 425 completed out of 1000 loss: 2.874232530593872
Epoch 426 completed out of 1000 loss: 2.8736531138420105
Epoch 427 completed out of 1000 loss: 2.8731215596199036
Epoch 428 completed out of 1000 loss: 2.872582256793976
Epoch 429 completed out of 1000 loss: 2.8720341324806213
Epoch 430 completed out of 1000 loss: 2.8714751601219177
Epoch 431 completed out of 1000 loss: 2.8709185123443604
Epoch 432 completed out of 1000 loss: 2.8703649044036865
Epoch 433 completed out of 1000 loss: 2.869809865951538
Epoch 434 completed out of 1000 loss: 2.8692615032196045
Epoch 435 completed out of 1000 loss: 2.8687057495117188
Epoch 436 completed out of 1000 loss: 2.8681541681289673
Epoch 437 completed out of 1000 loss: 2.8675953149795532
Epoch 438 completed out of 1000 loss: 2.8670291900634766
Epoch 439 completed out of 1000 loss: 2.8664631843566895
Epoch 440 completed out of 1000 loss: 2.8658881783485413
Epoch 441 completed out of 1000 loss: 2.8653146624565125
Epoch 442 completed out of 1000 loss: 2.864736557006836
Epoch 443 completed out of 1000 loss: 2.8641554713249207
Epoch 444 completed out of 1000 loss: 2.8635687828063965
Epoch 445 completed out of 1000 loss: 2.8629706501960754
Epoch 446 completed out of 1000 loss: 2.8623672127723694
Epoch 447 completed out of 1000 loss: 2.8617530465126038
Epoch 448 completed out of 1000 loss: 2.8611289262771606
Epoch 449 completed out of 1000 loss: 2.860498011112213
Epoch 450 completed out of 1000 loss: 2.8598586320877075
Epoch 451 completed out of 1000 loss: 2.8592087030410767
Epoch 452 completed out of 1000 loss: 2.858549654483795
Epoch 453 completed out of 1000 loss: 2.8578768372535706
Epoch 454 completed out of 1000 loss: 2.857203483581543
Epoch 455 completed out of 1000 loss: 2.856515407562256
Epoch 456 completed out of 1000 loss: 2.855824589729309
Epoch 457 completed out of 1000 loss: 2.855122447013855
Epoch 458 completed out of 1000 loss: 2.8544148206710815
Epoch 459 completed out of 1000 loss: 2.853696644306183
Epoch 460 completed out of 1000 loss: 2.8529778122901917
Epoch 461 completed out of 1000 loss: 2.852249503135681
Epoch 462 completed out of 1000 loss: 2.851520359516144
Epoch 463 completed out of 1000 loss: 2.850784957408905
Epoch 464 completed out of 1000 loss: 2.8500518202781677
Epoch 465 completed out of 1000 loss: 2.849312484264374
Epoch 466 completed out of 1000 loss: 2.848569691181183
Epoch 467 completed out of 1000 loss: 2.8478273153305054
Epoch 468 completed out of 1000 loss: 2.8470847606658936
Epoch 469 completed out of 1000 loss: 2.8463385105133057
Epoch 470 completed out of 1000 loss: 2.8455904722213745
Epoch 471 completed out of 1000 loss: 2.8448485136032104
Epoch 472 completed out of 1000 loss: 2.8441056609153748
Epoch 473 completed out of 1000 loss: 2.8433637022972107
Epoch 474 completed out of 1000 loss: 2.8426275849342346
Epoch 475 completed out of 1000 loss: 2.841894030570984
Epoch 476 completed out of 1000 loss: 2.8411713242530823
Epoch 477 completed out of 1000 loss: 2.8404500484466553
Epoch 478 completed out of 1000 loss: 2.839738965034485
Epoch 479 completed out of 1000 loss: 2.839034140110016
Epoch 480 completed out of 1000 loss: 2.838340997695923
Epoch 481 completed out of 1000 loss: 2.8376585245132446
Epoch 482 completed out of 1000 loss: 2.8369895219802856
Epoch 483 completed out of 1000 loss: 2.8363309502601624
Epoch 484 completed out of 1000 loss: 2.8356874585151672
Epoch 485 completed out of 1000 loss: 2.835058569908142
Epoch 486 completed out of 1000 loss: 2.8344390392303467
Epoch 487 completed out of 1000 loss: 2.833837866783142
Epoch 488 completed out of 1000 loss: 2.8332475423812866
Epoch 489 completed out of 1000 loss: 2.832672119140625
Epoch 490 completed out of 1000 loss: 2.832109212875366
Epoch 491 completed out of 1000 loss: 2.831552028656006
Epoch 492 completed out of 1000 loss: 2.8310052156448364
Epoch 493 completed out of 1000 loss: 2.8304641246795654
Epoch 494 completed out of 1000 loss: 2.829927623271942
Epoch 495 completed out of 1000 loss: 2.829397439956665
Epoch 496 completed out of 1000 loss: 2.828870415687561
Epoch 497 completed out of 1000 loss: 2.8283549547195435
Epoch 498 completed out of 1000 loss: 2.827847898006439
Epoch 499 completed out of 1000 loss: 2.8273454308509827
Epoch 500 completed out of 1000 loss: 2.826835811138153
Epoch 501 completed out of 1000 loss: 2.8263326287269592
Epoch 502 completed out of 1000 loss: 2.8258163928985596
Epoch 503 completed out of 1000 loss: 2.8252919912338257
Epoch 504 completed out of 1000 loss: 2.824747145175934
Epoch 505 completed out of 1000 loss: 2.824181914329529
Epoch 506 completed out of 1000 loss: 2.823621928691864
Epoch 507 completed out of 1000 loss: 2.8230618834495544
Epoch 508 completed out of 1000 loss: 2.8224520087242126
Epoch 509 completed out of 1000 loss: 2.8218576312065125
Epoch 510 completed out of 1000 loss: 2.821239173412323
Epoch 511 completed out of 1000 loss: 2.8205974102020264
Epoch 512 completed out of 1000 loss: 2.8199381232261658
Epoch 513 completed out of 1000 loss: 2.8192750811576843
Epoch 514 completed out of 1000 loss: 2.818600356578827
Epoch 515 completed out of 1000 loss: 2.8178905248641968
Epoch 516 completed out of 1000 loss: 2.81716126203537
Epoch 517 completed out of 1000 loss: 2.81641161441803
Epoch 518 completed out of 1000 loss: 2.8156431317329407
Epoch 519 completed out of 1000 loss: 2.8148531913757324
Epoch 520 completed out of 1000 loss: 2.81404310464859
Epoch 521 completed out of 1000 loss: 2.8132174015045166
Epoch 522 completed out of 1000 loss: 2.812376856803894
Epoch 523 completed out of 1000 loss: 2.8115158677101135
Epoch 524 completed out of 1000 loss: 2.810635030269623
Epoch 525 completed out of 1000 loss: 2.809739828109741
Epoch 526 completed out of 1000 loss: 2.80882728099823
Epoch 527 completed out of 1000 loss: 2.8078985810279846
Epoch 528 completed out of 1000 loss: 2.8069618940353394
Epoch 529 completed out of 1000 loss: 2.806023895740509
Epoch 530 completed out of 1000 loss: 2.805091917514801
Epoch 531 completed out of 1000 loss: 2.8041794300079346
Epoch 532 completed out of 1000 loss: 2.8032965064048767
Epoch 533 completed out of 1000 loss: 2.802458167076111
Epoch 534 completed out of 1000 loss: 2.801665782928467
Epoch 535 completed out of 1000 loss: 2.800923466682434
Epoch 536 completed out of 1000 loss: 2.8002399802207947
Epoch 537 completed out of 1000 loss: 2.7996108531951904
Epoch 538 completed out of 1000 loss: 2.799042284488678
Epoch 539 completed out of 1000 loss: 2.7985251545906067
Epoch 540 completed out of 1000 loss: 2.7980578541755676
Epoch 541 completed out of 1000 loss: 2.7976359128952026
Epoch 542 completed out of 1000 loss: 2.79724657535553
Epoch 543 completed out of 1000 loss: 2.7968921065330505
Epoch 544 completed out of 1000 loss: 2.796560764312744
Epoch 545 completed out of 1000 loss: 2.7962498664855957
Epoch 546 completed out of 1000 loss: 2.7959578037261963
Epoch 547 completed out of 1000 loss: 2.795689344406128
Epoch 548 completed out of 1000 loss: 2.795438826084137
Epoch 549 completed out of 1000 loss: 2.7952059507369995
Epoch 550 completed out of 1000 loss: 2.794977903366089
Epoch 551 completed out of 1000 loss: 2.7947574257850647
Epoch 552 completed out of 1000 loss: 2.794548213481903
Epoch 553 completed out of 1000 loss: 2.794342279434204
Epoch 554 completed out of 1000 loss: 2.7941121459007263
Epoch 555 completed out of 1000 loss: 2.7938462495803833
Epoch 556 completed out of 1000 loss: 2.793555200099945
Epoch 557 completed out of 1000 loss: 2.7932775020599365
Epoch 558 completed out of 1000 loss: 2.7930931448936462
Epoch 559 completed out of 1000 loss: 2.7928967475891113
Epoch 560 completed out of 1000 loss: 2.79275119304657
Epoch 561 completed out of 1000 loss: 2.792611062526703
Epoch 562 completed out of 1000 loss: 2.7924845814704895
Epoch 563 completed out of 1000 loss: 2.792358696460724
Epoch 564 completed out of 1000 loss: 2.792219638824463
Epoch 565 completed out of 1000 loss: 2.7920877933502197
Epoch 566 completed out of 1000 loss: 2.791959285736084
Epoch 567 completed out of 1000 loss: 2.791820466518402
Epoch 568 completed out of 1000 loss: 2.791674256324768
Epoch 569 completed out of 1000 loss: 2.7915099263191223
Epoch 570 completed out of 1000 loss: 2.7913374304771423
Epoch 571 completed out of 1000 loss: 2.7911607027053833
Epoch 572 completed out of 1000 loss: 2.7910189032554626
Epoch 573 completed out of 1000 loss: 2.7908968329429626
Epoch 574 completed out of 1000 loss: 2.7907862067222595
Epoch 575 completed out of 1000 loss: 2.79068124294281
Epoch 576 completed out of 1000 loss: 2.79058301448822
Epoch 577 completed out of 1000 loss: 2.79047554731369
Epoch 578 completed out of 1000 loss: 2.790373921394348
Epoch 579 completed out of 1000 loss: 2.7902716398239136
Epoch 580 completed out of 1000 loss: 2.790169358253479
Epoch 581 completed out of 1000 loss: 2.7900699377059937
Epoch 582 completed out of 1000 loss: 2.78996479511261
Epoch 583 completed out of 1000 loss: 2.78986793756485
Epoch 584 completed out of 1000 loss: 2.789767026901245
Epoch 585 completed out of 1000 loss: 2.789669990539551
Epoch 586 completed out of 1000 loss: 2.7895700335502625
Epoch 587 completed out of 1000 loss: 2.7894654273986816
Epoch 588 completed out of 1000 loss: 2.789352834224701
Epoch 589 completed out of 1000 loss: 2.7892176508903503
Epoch 590 completed out of 1000 loss: 2.7890737652778625
Epoch 591 completed out of 1000 loss: 2.7889243960380554
Epoch 592 completed out of 1000 loss: 2.7887920141220093
Epoch 593 completed out of 1000 loss: 2.7886857390403748
Epoch 594 completed out of 1000 loss: 2.7885929942131042
Epoch 595 completed out of 1000 loss: 2.788498640060425
Epoch 596 completed out of 1000 loss: 2.7884050607681274
Epoch 597 completed out of 1000 loss: 2.788305163383484
Epoch 598 completed out of 1000 loss: 2.788209617137909
Epoch 599 completed out of 1000 loss: 2.788111984729767
Epoch 600 completed out of 1000 loss: 2.788011848926544
Epoch 601 completed out of 1000 loss: 2.7879172563552856
Epoch 602 completed out of 1000 loss: 2.787818670272827
Epoch 603 completed out of 1000 loss: 2.787721574306488
Epoch 604 completed out of 1000 loss: 2.7876235842704773
Epoch 605 completed out of 1000 loss: 2.7875264286994934
Epoch 606 completed out of 1000 loss: 2.787427604198456
Epoch 607 completed out of 1000 loss: 2.7873334884643555
Epoch 608 completed out of 1000 loss: 2.787235379219055
Epoch 609 completed out of 1000 loss: 2.787141799926758
Epoch 610 completed out of 1000 loss: 2.787043273448944
Epoch 611 completed out of 1000 loss: 2.7869467735290527
Epoch 612 completed out of 1000 loss: 2.7868507504463196
Epoch 613 completed out of 1000 loss: 2.7867523431777954
Epoch 614 completed out of 1000 loss: 2.7866517305374146
Epoch 615 completed out of 1000 loss: 2.786550283432007
Epoch 616 completed out of 1000 loss: 2.7864487767219543
Epoch 617 completed out of 1000 loss: 2.78634250164032
Epoch 618 completed out of 1000 loss: 2.7862362265586853
Epoch 619 completed out of 1000 loss: 2.7861278653144836
Epoch 620 completed out of 1000 loss: 2.7860147953033447
Epoch 621 completed out of 1000 loss: 2.7859041690826416
Epoch 622 completed out of 1000 loss: 2.785789430141449
Epoch 623 completed out of 1000 loss: 2.785672962665558
Epoch 624 completed out of 1000 loss: 2.785550355911255
Epoch 625 completed out of 1000 loss: 2.7854294180870056
Epoch 626 completed out of 1000 loss: 2.7853006720542908
Epoch 627 completed out of 1000 loss: 2.7851715087890625
Epoch 628 completed out of 1000 loss: 2.7850311994552612
Epoch 629 completed out of 1000 loss: 2.784891366958618
Epoch 630 completed out of 1000 loss: 2.7847485542297363
Epoch 631 completed out of 1000 loss: 2.784591317176819
Epoch 632 completed out of 1000 loss: 2.7844287753105164
Epoch 633 completed out of 1000 loss: 2.7842660546302795
Epoch 634 completed out of 1000 loss: 2.7840893268585205
Epoch 635 completed out of 1000 loss: 2.7839020490646362
Epoch 636 completed out of 1000 loss: 2.78371262550354
Epoch 637 completed out of 1000 loss: 2.7835090160369873
Epoch 638 completed out of 1000 loss: 2.7832953929901123
Epoch 639 completed out of 1000 loss: 2.7830690145492554
Epoch 640 completed out of 1000 loss: 2.782833218574524
Epoch 641 completed out of 1000 loss: 2.782579720020294
Epoch 642 completed out of 1000 loss: 2.78231143951416
Epoch 643 completed out of 1000 loss: 2.7820297479629517
Epoch 644 completed out of 1000 loss: 2.781731903553009
Epoch 645 completed out of 1000 loss: 2.7814103960990906
Epoch 646 completed out of 1000 loss: 2.7810739874839783
Epoch 647 completed out of 1000 loss: 2.7807137966156006
Epoch 648 completed out of 1000 loss: 2.780336618423462
Epoch 649 completed out of 1000 loss: 2.7799359560012817
Epoch 650 completed out of 1000 loss: 2.7795241475105286
Epoch 651 completed out of 1000 loss: 2.7790942192077637
Epoch 652 completed out of 1000 loss: 2.778650224208832
Epoch 653 completed out of 1000 loss: 2.778197228908539
Epoch 654 completed out of 1000 loss: 2.777744472026825
Epoch 655 completed out of 1000 loss: 2.7772932648658752
Epoch 656 completed out of 1000 loss: 2.7768560647964478
Epoch 657 completed out of 1000 loss: 2.7764328718185425
Epoch 658 completed out of 1000 loss: 2.776033937931061
Epoch 659 completed out of 1000 loss: 2.7756587862968445
Epoch 660 completed out of 1000 loss: 2.775310695171356
Epoch 661 completed out of 1000 loss: 2.7749916315078735
Epoch 662 completed out of 1000 loss: 2.774694800376892
Epoch 663 completed out of 1000 loss: 2.774429678916931
Epoch 664 completed out of 1000 loss: 2.7741861939430237
Epoch 665 completed out of 1000 loss: 2.7739579677581787
Epoch 666 completed out of 1000 loss: 2.773750603199005
Epoch 667 completed out of 1000 loss: 2.7735555171966553
Epoch 668 completed out of 1000 loss: 2.7733799815177917
Epoch 669 completed out of 1000 loss: 2.7732099890708923
Epoch 670 completed out of 1000 loss: 2.77304744720459
Epoch 671 completed out of 1000 loss: 2.7728919982910156
Epoch 672 completed out of 1000 loss: 2.7727394700050354
Epoch 673 completed out of 1000 loss: 2.772586703300476
Epoch 674 completed out of 1000 loss: 2.772402763366699
Epoch 675 completed out of 1000 loss: 2.7721489667892456
Epoch 676 completed out of 1000 loss: 2.771681547164917
Epoch 677 completed out of 1000 loss: 2.771427094936371
Epoch 678 completed out of 1000 loss: 2.7712000608444214
Epoch 679 completed out of 1000 loss: 2.7709569334983826
Epoch 680 completed out of 1000 loss: 2.7706812024116516
Epoch 681 completed out of 1000 loss: 2.770252525806427
Epoch 682 completed out of 1000 loss: 2.769614040851593
Epoch 683 completed out of 1000 loss: 2.769534111022949
Epoch 684 completed out of 1000 loss: 2.76943576335907
Epoch 685 completed out of 1000 loss: 2.7691965103149414
Epoch 686 completed out of 1000 loss: 2.7688523530960083
Epoch 687 completed out of 1000 loss: 2.768282651901245
Epoch 688 completed out of 1000 loss: 2.7679209113121033
Epoch 689 completed out of 1000 loss: 2.7677676677703857
Epoch 690 completed out of 1000 loss: 2.7676145434379578
Epoch 691 completed out of 1000 loss: 2.7674485445022583
Epoch 692 completed out of 1000 loss: 2.767259180545807
Epoch 693 completed out of 1000 loss: 2.7668948769569397
Epoch 694 completed out of 1000 loss: 2.7663636207580566
Epoch 695 completed out of 1000 loss: 2.7661189436912537
Epoch 696 completed out of 1000 loss: 2.765977144241333
Epoch 697 completed out of 1000 loss: 2.7657673358917236
Epoch 698 completed out of 1000 loss: 2.765511929988861
Epoch 699 completed out of 1000 loss: 2.765259563922882
Epoch 700 completed out of 1000 loss: 2.765034019947052
Epoch 701 completed out of 1000 loss: 2.7647935152053833
Epoch 702 completed out of 1000 loss: 2.764460861682892
Epoch 703 completed out of 1000 loss: 2.764128088951111
Epoch 704 completed out of 1000 loss: 2.7638407349586487
Epoch 705 completed out of 1000 loss: 2.763579845428467
Epoch 706 completed out of 1000 loss: 2.763250172138214
Epoch 707 completed out of 1000 loss: 2.762748897075653
Epoch 708 completed out of 1000 loss: 2.7622509002685547
Epoch 709 completed out of 1000 loss: 2.7619264721870422
Epoch 710 completed out of 1000 loss: 2.7615405321121216
Epoch 711 completed out of 1000 loss: 2.7611584663391113
Epoch 712 completed out of 1000 loss: 2.7607847452163696
Epoch 713 completed out of 1000 loss: 2.760335922241211
Epoch 714 completed out of 1000 loss: 2.7598226070404053
Epoch 715 completed out of 1000 loss: 2.759302079677582
Epoch 716 completed out of 1000 loss: 2.7588693499565125
Epoch 717 completed out of 1000 loss: 2.7584277987480164
Epoch 718 completed out of 1000 loss: 2.757853627204895
Epoch 719 completed out of 1000 loss: 2.7571677565574646
Epoch 720 completed out of 1000 loss: 2.7565507292747498
Epoch 721 completed out of 1000 loss: 2.7559439539909363
Epoch 722 completed out of 1000 loss: 2.7551454305648804
Epoch 723 completed out of 1000 loss: 2.7544217109680176
Epoch 724 completed out of 1000 loss: 2.753801167011261
Epoch 725 completed out of 1000 loss: 2.7531842589378357
Epoch 726 completed out of 1000 loss: 2.7524118423461914
Epoch 727 completed out of 1000 loss: 2.751652419567108
Epoch 728 completed out of 1000 loss: 2.751031517982483
Epoch 729 completed out of 1000 loss: 2.750426769256592
Epoch 730 completed out of 1000 loss: 2.7496421933174133
Epoch 731 completed out of 1000 loss: 2.748967230319977
Epoch 732 completed out of 1000 loss: 2.7484013438224792
Epoch 733 completed out of 1000 loss: 2.7478166818618774
Epoch 734 completed out of 1000 loss: 2.7472298741340637
Epoch 735 completed out of 1000 loss: 2.746634304523468
Epoch 736 completed out of 1000 loss: 2.7460285425186157
Epoch 737 completed out of 1000 loss: 2.7453854084014893
Epoch 738 completed out of 1000 loss: 2.744758725166321
Epoch 739 completed out of 1000 loss: 2.7442156076431274
Epoch 740 completed out of 1000 loss: 2.7436798810958862
Epoch 741 completed out of 1000 loss: 2.7431463599205017
Epoch 742 completed out of 1000 loss: 2.7426193356513977
Epoch 743 completed out of 1000 loss: 2.74210524559021
Epoch 744 completed out of 1000 loss: 2.7416117787361145
Epoch 745 completed out of 1000 loss: 2.7411365509033203
Epoch 746 completed out of 1000 loss: 2.7406821250915527
Epoch 747 completed out of 1000 loss: 2.740249454975128
Epoch 748 completed out of 1000 loss: 2.7398350834846497
Epoch 749 completed out of 1000 loss: 2.7394497394561768
Epoch 750 completed out of 1000 loss: 2.739092767238617
Epoch 751 completed out of 1000 loss: 2.738757908344269
Epoch 752 completed out of 1000 loss: 2.738449454307556
Epoch 753 completed out of 1000 loss: 2.73816055059433
Epoch 754 completed out of 1000 loss: 2.73789644241333
Epoch 755 completed out of 1000 loss: 2.7376503944396973
Epoch 756 completed out of 1000 loss: 2.7374263405799866
Epoch 757 completed out of 1000 loss: 2.737216353416443
Epoch 758 completed out of 1000 loss: 2.7370213866233826
Epoch 759 completed out of 1000 loss: 2.7368454933166504
Epoch 760 completed out of 1000 loss: 2.7366823554039
Epoch 761 completed out of 1000 loss: 2.736526370048523
Epoch 762 completed out of 1000 loss: 2.736386775970459
Epoch 763 completed out of 1000 loss: 2.7362532019615173
Epoch 764 completed out of 1000 loss: 2.7361260652542114
Epoch 765 completed out of 1000 loss: 2.7360129356384277
Epoch 766 completed out of 1000 loss: 2.735902488231659
Epoch 767 completed out of 1000 loss: 2.7358019948005676
Epoch 768 completed out of 1000 loss: 2.735703706741333
Epoch 769 completed out of 1000 loss: 2.7356101870536804
Epoch 770 completed out of 1000 loss: 2.735526144504547
Epoch 771 completed out of 1000 loss: 2.735440254211426
Epoch 772 completed out of 1000 loss: 2.735357344150543
Epoch 773 completed out of 1000 loss: 2.7352858781814575
Epoch 774 completed out of 1000 loss: 2.735211670398712
Epoch 775 completed out of 1000 loss: 2.7351365089416504
Epoch 776 completed out of 1000 loss: 2.7350719571113586
Epoch 777 completed out of 1000 loss: 2.7350035905838013
Epoch 778 completed out of 1000 loss: 2.7349435091018677
Epoch 779 completed out of 1000 loss: 2.734881639480591
Epoch 780 completed out of 1000 loss: 2.73482209444046
Epoch 781 completed out of 1000 loss: 2.7347623705863953
Epoch 782 completed out of 1000 loss: 2.7347012758255005
Epoch 783 completed out of 1000 loss: 2.734650671482086
Epoch 784 completed out of 1000 loss: 2.734592020511627
Epoch 785 completed out of 1000 loss: 2.7345452308654785
Epoch 786 completed out of 1000 loss: 2.7344892621040344
Epoch 787 completed out of 1000 loss: 2.7344391345977783
Epoch 788 completed out of 1000 loss: 2.7343868613243103
Epoch 789 completed out of 1000 loss: 2.7343409657478333
Epoch 790 completed out of 1000 loss: 2.734291732311249
Epoch 791 completed out of 1000 loss: 2.73424369096756
Epoch 792 completed out of 1000 loss: 2.734194040298462
Epoch 793 completed out of 1000 loss: 2.734150230884552
Epoch 794 completed out of 1000 loss: 2.734105944633484
Epoch 795 completed out of 1000 loss: 2.734058916568756
Epoch 796 completed out of 1000 loss: 2.7340144515037537
Epoch 797 completed out of 1000 loss: 2.733971118927002
Epoch 798 completed out of 1000 loss: 2.7339271306991577
Epoch 799 completed out of 1000 loss: 2.733885645866394
Epoch 800 completed out of 1000 loss: 2.73384290933609
Epoch 801 completed out of 1000 loss: 2.733802556991577
Epoch 802 completed out of 1000 loss: 2.7337611317634583
Epoch 803 completed out of 1000 loss: 2.7337167859077454
Epoch 804 completed out of 1000 loss: 2.733678638935089
Epoch 805 completed out of 1000 loss: 2.7336405515670776
Epoch 806 completed out of 1000 loss: 2.7335956692695618
Epoch 807 completed out of 1000 loss: 2.733556091785431
Epoch 808 completed out of 1000 loss: 2.733520269393921
Epoch 809 completed out of 1000 loss: 2.7334786653518677
Epoch 810 completed out of 1000 loss: 2.733441710472107
Epoch 811 completed out of 1000 loss: 2.7334014177322388
Epoch 812 completed out of 1000 loss: 2.733364522457123
Epoch 813 completed out of 1000 loss: 2.7333277463912964
Epoch 814 completed out of 1000 loss: 2.733291208744049
Epoch 815 completed out of 1000 loss: 2.7332546710968018
Epoch 816 completed out of 1000 loss: 2.7332184314727783
Epoch 817 completed out of 1000 loss: 2.733181059360504
Epoch 818 completed out of 1000 loss: 2.733144521713257
Epoch 819 completed out of 1000 loss: 2.7331093549728394
Epoch 820 completed out of 1000 loss: 2.733069658279419
Epoch 821 completed out of 1000 loss: 2.733036756515503
Epoch 822 completed out of 1000 loss: 2.733002185821533
Epoch 823 completed out of 1000 loss: 2.73296982049942
Epoch 824 completed out of 1000 loss: 2.732933461666107
Epoch 825 completed out of 1000 loss: 2.7329025268554688
Epoch 826 completed out of 1000 loss: 2.732865631580353
Epoch 827 completed out of 1000 loss: 2.7328354120254517
Epoch 828 completed out of 1000 loss: 2.7327991127967834
Epoch 829 completed out of 1000 loss: 2.732765793800354
Epoch 830 completed out of 1000 loss: 2.7327322363853455
Epoch 831 completed out of 1000 loss: 2.7327014207839966
Epoch 832 completed out of 1000 loss: 2.732669174671173
Epoch 833 completed out of 1000 loss: 2.7326369881629944
Epoch 834 completed out of 1000 loss: 2.7326013445854187
Epoch 835 completed out of 1000 loss: 2.7325698137283325
Epoch 836 completed out of 1000 loss: 2.7325363755226135
Epoch 837 completed out of 1000 loss: 2.73250275850296
Epoch 838 completed out of 1000 loss: 2.7324724197387695
Epoch 839 completed out of 1000 loss: 2.732440173625946
Epoch 840 completed out of 1000 loss: 2.7324119806289673
Epoch 841 completed out of 1000 loss: 2.7323790788650513
Epoch 842 completed out of 1000 loss: 2.7323471307754517
Epoch 843 completed out of 1000 loss: 2.7323139905929565
Epoch 844 completed out of 1000 loss: 2.732284426689148
Epoch 845 completed out of 1000 loss: 2.732255458831787
Epoch 846 completed out of 1000 loss: 2.7322251200675964
Epoch 847 completed out of 1000 loss: 2.732194721698761
Epoch 848 completed out of 1000 loss: 2.732166588306427
Epoch 849 completed out of 1000 loss: 2.7321345806121826
Epoch 850 completed out of 1000 loss: 2.732102930545807
Epoch 851 completed out of 1000 loss: 2.7320720553398132
Epoch 852 completed out of 1000 loss: 2.732042074203491
Epoch 853 completed out of 1000 loss: 2.7320127487182617
Epoch 854 completed out of 1000 loss: 2.731983006000519
Epoch 855 completed out of 1000 loss: 2.7319509983062744
Epoch 856 completed out of 1000 loss: 2.7319228053092957
Epoch 857 completed out of 1000 loss: 2.7318928837776184
Epoch 858 completed out of 1000 loss: 2.7318632006645203
Epoch 859 completed out of 1000 loss: 2.7318336367607117
Epoch 860 completed out of 1000 loss: 2.731803774833679
Epoch 861 completed out of 1000 loss: 2.731774866580963
Epoch 862 completed out of 1000 loss: 2.7317452430725098
Epoch 863 completed out of 1000 loss: 2.7317180037498474
Epoch 864 completed out of 1000 loss: 2.7316868901252747
Epoch 865 completed out of 1000 loss: 2.7316567301750183
Epoch 866 completed out of 1000 loss: 2.731628954410553
Epoch 867 completed out of 1000 loss: 2.731601655483246
Epoch 868 completed out of 1000 loss: 2.7315717935562134
Epoch 869 completed out of 1000 loss: 2.731544256210327
Epoch 870 completed out of 1000 loss: 2.73151433467865
Epoch 871 completed out of 1000 loss: 2.7314847707748413
Epoch 872 completed out of 1000 loss: 2.7314566373825073
Epoch 873 completed out of 1000 loss: 2.7314282059669495
Epoch 874 completed out of 1000 loss: 2.7313987016677856
Epoch 875 completed out of 1000 loss: 2.7313682436943054
Epoch 876 completed out of 1000 loss: 2.731341540813446
Epoch 877 completed out of 1000 loss: 2.7313120365142822
Epoch 878 completed out of 1000 loss: 2.731284439563751
Epoch 879 completed out of 1000 loss: 2.7312519550323486
Epoch 880 completed out of 1000 loss: 2.731225848197937
Epoch 881 completed out of 1000 loss: 2.7311967611312866
Epoch 882 completed out of 1000 loss: 2.7311684489250183
Epoch 883 completed out of 1000 loss: 2.731140613555908
Epoch 884 completed out of 1000 loss: 2.7311132550239563
Epoch 885 completed out of 1000 loss: 2.731082499027252
Epoch 886 completed out of 1000 loss: 2.7310546040534973
Epoch 887 completed out of 1000 loss: 2.731024742126465
Epoch 888 completed out of 1000 loss: 2.7309988141059875
Epoch 889 completed out of 1000 loss: 2.730968952178955
Epoch 890 completed out of 1000 loss: 2.730939567089081
Epoch 891 completed out of 1000 loss: 2.7309130430221558
Epoch 892 completed out of 1000 loss: 2.7308859825134277
Epoch 893 completed out of 1000 loss: 2.7308579683303833
Epoch 894 completed out of 1000 loss: 2.7308260202407837
Epoch 895 completed out of 1000 loss: 2.7307976484298706
Epoch 896 completed out of 1000 loss: 2.7307692170143127
Epoch 897 completed out of 1000 loss: 2.7307440638542175
Epoch 898 completed out of 1000 loss: 2.7307141423225403
Epoch 899 completed out of 1000 loss: 2.7306864857673645
Epoch 900 completed out of 1000 loss: 2.730656385421753
Epoch 901 completed out of 1000 loss: 2.7306313514709473
Epoch 902 completed out of 1000 loss: 2.730600655078888
Epoch 903 completed out of 1000 loss: 2.7305732369422913
Epoch 904 completed out of 1000 loss: 2.7305444478988647
Epoch 905 completed out of 1000 loss: 2.73051655292511
Epoch 906 completed out of 1000 loss: 2.730485737323761
Epoch 907 completed out of 1000 loss: 2.730457365512848
Epoch 908 completed out of 1000 loss: 2.730430483818054
Epoch 909 completed out of 1000 loss: 2.7304012179374695
Epoch 910 completed out of 1000 loss: 2.7303736209869385
Epoch 911 completed out of 1000 loss: 2.7303441166877747
Epoch 912 completed out of 1000 loss: 2.7303149700164795
Epoch 913 completed out of 1000 loss: 2.7302868366241455
Epoch 914 completed out of 1000 loss: 2.7302578687667847
Epoch 915 completed out of 1000 loss: 2.730230152606964
Epoch 916 completed out of 1000 loss: 2.7301998138427734
Epoch 917 completed out of 1000 loss: 2.730172097682953
Epoch 918 completed out of 1000 loss: 2.7301458716392517
Epoch 919 completed out of 1000 loss: 2.7301177382469177
Epoch 920 completed out of 1000 loss: 2.730086028575897
Epoch 921 completed out of 1000 loss: 2.730057656764984
Epoch 922 completed out of 1000 loss: 2.7300291061401367
Epoch 923 completed out of 1000 loss: 2.7299998998641968
Epoch 924 completed out of 1000 loss: 2.729972243309021
Epoch 925 completed out of 1000 loss: 2.729941487312317
Epoch 926 completed out of 1000 loss: 2.729914665222168
Epoch 927 completed out of 1000 loss: 2.729887008666992
Epoch 928 completed out of 1000 loss: 2.7298572063446045
Epoch 929 completed out of 1000 loss: 2.729829490184784
Epoch 930 completed out of 1000 loss: 2.729801058769226
Epoch 931 completed out of 1000 loss: 2.7297691106796265
Epoch 932 completed out of 1000 loss: 2.729742109775543
Epoch 933 completed out of 1000 loss: 2.7297122478485107
Epoch 934 completed out of 1000 loss: 2.7296829223632812
Epoch 935 completed out of 1000 loss: 2.729654014110565
Epoch 936 completed out of 1000 loss: 2.729624927043915
Epoch 937 completed out of 1000 loss: 2.7295963168144226
Epoch 938 completed out of 1000 loss: 2.729567289352417
Epoch 939 completed out of 1000 loss: 2.7295404076576233
Epoch 940 completed out of 1000 loss: 2.7295114398002625
Epoch 941 completed out of 1000 loss: 2.7294835448265076
Epoch 942 completed out of 1000 loss: 2.729455053806305
Epoch 943 completed out of 1000 loss: 2.7294243574142456
Epoch 944 completed out of 1000 loss: 2.7293957471847534
Epoch 945 completed out of 1000 loss: 2.729365646839142
Epoch 946 completed out of 1000 loss: 2.7293394207954407
Epoch 947 completed out of 1000 loss: 2.72931045293808
Epoch 948 completed out of 1000 loss: 2.7292838096618652
Epoch 949 completed out of 1000 loss: 2.72925466299057
Epoch 950 completed out of 1000 loss: 2.729225754737854
Epoch 951 completed out of 1000 loss: 2.7291966676712036
Epoch 952 completed out of 1000 loss: 2.7291672229766846
Epoch 953 completed out of 1000 loss: 2.7291393280029297
Epoch 954 completed out of 1000 loss: 2.729112148284912
Epoch 955 completed out of 1000 loss: 2.7290831804275513
Epoch 956 completed out of 1000 loss: 2.7290552854537964
Epoch 957 completed out of 1000 loss: 2.729026675224304
Epoch 958 completed out of 1000 loss: 2.7289977073669434
Epoch 959 completed out of 1000 loss: 2.728968858718872
Epoch 960 completed out of 1000 loss: 2.7289406061172485
Epoch 961 completed out of 1000 loss: 2.7289136052131653
Epoch 962 completed out of 1000 loss: 2.72888445854187
Epoch 963 completed out of 1000 loss: 2.7288549542427063
Epoch 964 completed out of 1000 loss: 2.72882878780365
Epoch 965 completed out of 1000 loss: 2.7288001775741577
Epoch 966 completed out of 1000 loss: 2.7287717461586
Epoch 967 completed out of 1000 loss: 2.728743553161621
Epoch 968 completed out of 1000 loss: 2.7287155985832214
Epoch 969 completed out of 1000 loss: 2.7286866903305054
Epoch 970 completed out of 1000 loss: 2.7286571860313416
Epoch 971 completed out of 1000 loss: 2.728628993034363
Epoch 972 completed out of 1000 loss: 2.728600561618805
Epoch 973 completed out of 1000 loss: 2.7285726070404053
Epoch 974 completed out of 1000 loss: 2.7285441756248474
Epoch 975 completed out of 1000 loss: 2.728514015674591
Epoch 976 completed out of 1000 loss: 2.72848641872406
Epoch 977 completed out of 1000 loss: 2.7284594774246216
Epoch 978 completed out of 1000 loss: 2.7284327149391174
Epoch 979 completed out of 1000 loss: 2.7284046411514282
Epoch 980 completed out of 1000 loss: 2.728374660015106
Epoch 981 completed out of 1000 loss: 2.72834712266922
Epoch 982 completed out of 1000 loss: 2.728318989276886
Epoch 983 completed out of 1000 loss: 2.7282906770706177
Epoch 984 completed out of 1000 loss: 2.7282609939575195
Epoch 985 completed out of 1000 loss: 2.7282326221466064
Epoch 986 completed out of 1000 loss: 2.728204131126404
Epoch 987 completed out of 1000 loss: 2.728177547454834
Epoch 988 completed out of 1000 loss: 2.728149950504303
Epoch 989 completed out of 1000 loss: 2.7281214594841003
Epoch 990 completed out of 1000 loss: 2.728093206882477
Epoch 991 completed out of 1000 loss: 2.728064715862274
Epoch 992 completed out of 1000 loss: 2.7280364632606506
Epoch 993 completed out of 1000 loss: 2.7280056476593018
Epoch 994 completed out of 1000 loss: 2.72797828912735
Epoch 995 completed out of 1000 loss: 2.727948784828186
Epoch 996 completed out of 1000 loss: 2.7279197573661804
Epoch 997 completed out of 1000 loss: 2.72789204120636
Epoch 998 completed out of 1000 loss: 2.7278627157211304
Epoch 999 completed out of 1000 loss: 2.7278358936309814
Tensor("Softmax_4:0", shape=(?, 2), dtype=float32)
[[0.57084125 0.42915875]
 [0.49265644 0.50734353]
 [0.5489681  0.4510319 ]
 ...
 [0.18181583 0.81818414]
 [0.6694376  0.3305624 ]
 [0.482254   0.51774603]]
False
accuracy on train: 0.56753916
Accuracy on test : 0.56228954
[[0. 1.]
 [1. 0.]
 [0. 1.]
 ...
 [0. 1.]
 [1. 0.]
 [1. 0.]]
In [124]:
cm8 = confusion_matrix(df_ye,predy5)
plt.figure(figsize = (10,7))
cm8df = pd.DataFrame(cm8, index = ["1",'0'], columns = ["1",'0'])
seaborn.set(font_scale=1.4)#for label size
seaborn.heatmap(cm8df, annot=True,annot_kws={"size": 16})# font size
Out[124]:
<matplotlib.axes._subplots.AxesSubplot at 0x20f0db93dc8>
In [125]:
fpr, tpr, _ = roc_curve(predy5, df_ye, drop_intermediate=False)
plt.plot(fpr, tpr, color='red')
plt.xlabel('fpr')
plt.ylabel('tpr')
plt.title('ROC curve fr Tensorflow 5')
plt.show()
In [126]:
seq = np.arange(1000).tolist()
plt.plot(seq, losslist2)
plt.show()
In [127]:
plt.plot(seq, losslist3)
plt.show()
In [128]:
plt.plot(seq, losslist4)
plt.show()
In [129]:
plt.plot(seq, losslist5)
plt.show()
In [130]:
print("F1 Score for fold 1 of Tensorflow model:", f1_score(df_ya, predy))
print("F1 Score for fold 2 of Tensorflow model:", f1_score(df_yb, predy2))
print("F1 Score for fold 3 of Tensorflow model:", f1_score(df_yc, predy3))
print("F1 Score for fold 4 of Tensorflow model:", f1_score(df_yd, predy4))
print("F1 Score for fold 5 of Tensorflow model:", f1_score(df_ye, predy5))
F1 Score for fold 1 of Tensorflow model: 0.6384456386936751
F1 Score for fold 2 of Tensorflow model: 0.6008056781124113
F1 Score for fold 3 of Tensorflow model: 0.614875425209157
F1 Score for fold 4 of Tensorflow model: 0.5578477243726073
F1 Score for fold 5 of Tensorflow model: 0.5184070386663578
In [147]:
from sklearn.metrics import classification_report, recall_score, precision_score, precision_recall_curve
In [146]:
# from inspect import signature
# # https://scikit-learn.org/stable/modules/classes.html#sklearn-metrics-metrics
# print(len(df_ya), len(svm1_predy[0]))
# precision, recall, _ = precision_recall_curve(df_ya, svm1_predy[0])
# print(precision, recall, _)
# # step_kwargs = ({'step': 'post'}
# #                if 'step' in signature(plt.fill_between).parameters
# #                else {})
# plt.step(recall, precision, color='red', alpha=0.2, where='post')
# # plt.fill_between(recall, precision, alpha=0.2, color='b', **step_kwargs)
# plt.xlabel('Recall')
# plt.ylabel('Precision')
# plt.ylim([0.0, 1.05])
# plt.xlim([0.0, 1.0])
# # plt.title('2-class Precision-Recall curve: AP={0:0.2f}'.format(average_precision))
9503 9503
[0.51678417 0.63423505 1.        ] [1.        0.5681124 0.       ] [0 1]
Out[146]:
(0.0, 1.0)
In [151]:
print(classification_report(df_ya, svm1_predy[0]))
print(classification_report(df_yb, svm1_predy[1]))
print(classification_report(df_yc, svm1_predy[2]))
print(classification_report(df_yd, svm1_predy[3]))
print(classification_report(df_ye, svm1_predy[4]))

print(classification_report(df_ya, svm5_predy[0]))
print(classification_report(df_yb, svm5_predy[1]))
print(classification_report(df_yc, svm5_predy[2]))
print(classification_report(df_yd, svm5_predy[3]))
print(classification_report(df_ye, svm5_predy[4]))

print(classification_report(df_ya, svm6_predy[0]))
print(classification_report(df_yb, svm6_predy[1]))
print(classification_report(df_yc, svm6_predy[2]))
print(classification_report(df_yd, svm6_predy[3]))
print(classification_report(df_ye, svm6_predy[4]))

print(classification_report(df_ya, knn1_predy[0]))
print(classification_report(df_yb, knn1_predy[1]))
print(classification_report(df_yc, knn1_predy[2]))
print(classification_report(df_yd, knn1_predy[3]))
print(classification_report(df_ye, knn1_predy[4]))

print(classification_report(df_ya, knn2_predy[0]))
print(classification_report(df_yb, knn2_predy[1]))
print(classification_report(df_yc, knn2_predy[2]))
print(classification_report(df_yd, knn2_predy[3]))
print(classification_report(df_ye, knn2_predy[4]))

print(classification_report(df_ya, rfc1_predy[0]))
print(classification_report(df_yb, rfc1_predy[1]))
print(classification_report(df_yc, rfc1_predy[2]))
print(classification_report(df_yd, rfc1_predy[3]))
print(classification_report(df_ye, rfc1_predy[4]))

print(classification_report(df_ya, rfc2_predy[0]))
print(classification_report(df_yb, rfc2_predy[1]))
print(classification_report(df_yc, rfc2_predy[2]))
print(classification_report(df_yd, rfc2_predy[3]))
print(classification_report(df_ye, rfc2_predy[4]))

print(classification_report(df_ya, rfc3_predy[0]))
print(classification_report(df_yb, rfc3_predy[1]))
print(classification_report(df_yc, rfc3_predy[2]))
print(classification_report(df_yd, rfc3_predy[3]))
print(classification_report(df_ye, rfc3_predy[4]))

print(classification_report(df_ya, mlp1_predy[0]))
print(classification_report(df_yb, mlp1_predy[1]))
print(classification_report(df_yc, mlp1_predy[2]))
print(classification_report(df_yd, mlp1_predy[3]))
print(classification_report(df_ye, mlp1_predy[4]))

print(classification_report(df_ya, mlp2_predy[0]))
print(classification_report(df_yb, mlp2_predy[1]))
print(classification_report(df_yc, mlp2_predy[2]))
print(classification_report(df_yd, mlp2_predy[3]))
print(classification_report(df_ye, mlp2_predy[4]))

print(classification_report(df_ya, logr1_predy[0]))
print(classification_report(df_yb, logr1_predy[1]))
print(classification_report(df_yc, logr1_predy[2]))
print(classification_report(df_yd, logr1_predy[3]))
print(classification_report(df_ye, logr1_predy[4]))

print(classification_report(df_ya, logr2_predy[0]))
print(classification_report(df_yb, logr2_predy[1]))
print(classification_report(df_yc, logr2_predy[2]))
print(classification_report(df_yd, logr2_predy[3]))
print(classification_report(df_ye, logr2_predy[4]))

print(classification_report(df_ya, bnb1_predy[0]))
print(classification_report(df_yb, bnb1_predy[1]))
print(classification_report(df_yc, bnb1_predy[2]))
print(classification_report(df_yd, bnb1_predy[3]))
print(classification_report(df_ye, bnb1_predy[4]))

print(classification_report(df_ya, cnb1_predy[0]))
print(classification_report(df_yb, cnb1_predy[1]))
print(classification_report(df_yc, cnb1_predy[2]))
print(classification_report(df_yd, cnb1_predy[3]))
print(classification_report(df_ye, cnb1_predy[4]))

print(classification_report(df_ya, gnb1_predy[0]))
print(classification_report(df_yb, gnb1_predy[1]))
print(classification_report(df_yc, gnb1_predy[2]))
print(classification_report(df_yd, gnb1_predy[3]))
print(classification_report(df_ye, gnb1_predy[4]))

print(classification_report(df_ya, mnb1_predy[0]))
print(classification_report(df_yb, mnb1_predy[1]))
print(classification_report(df_yc, mnb1_predy[2]))
print(classification_report(df_yd, mnb1_predy[3]))
print(classification_report(df_ye, mnb1_predy[4]))
              precision    recall  f1-score   support

           0       0.58      0.65      0.62      4592
           1       0.63      0.57      0.60      4911

    accuracy                           0.61      9503
   macro avg       0.61      0.61      0.61      9503
weighted avg       0.61      0.61      0.61      9503

              precision    recall  f1-score   support

           0       0.60      0.65      0.62      4719
           1       0.62      0.57      0.59      4785

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.58      0.64      0.61      4647
           1       0.62      0.56      0.59      4857

    accuracy                           0.60      9504
   macro avg       0.60      0.60      0.60      9504
weighted avg       0.60      0.60      0.60      9504

              precision    recall  f1-score   support

           0       0.59      0.64      0.62      4663
           1       0.63      0.58      0.60      4841

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.59      0.66      0.62      4603
           1       0.64      0.57      0.60      4901

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.58      0.52      0.55      4592
           1       0.59      0.65      0.62      4911

    accuracy                           0.59      9503
   macro avg       0.59      0.59      0.59      9503
weighted avg       0.59      0.59      0.59      9503

              precision    recall  f1-score   support

           0       0.59      0.52      0.55      4719
           1       0.58      0.65      0.61      4785

    accuracy                           0.58      9504
   macro avg       0.59      0.58      0.58      9504
weighted avg       0.59      0.58      0.58      9504

              precision    recall  f1-score   support

           0       0.58      0.52      0.55      4647
           1       0.58      0.64      0.61      4857

    accuracy                           0.58      9504
   macro avg       0.58      0.58      0.58      9504
weighted avg       0.58      0.58      0.58      9504

              precision    recall  f1-score   support

           0       0.59      0.52      0.55      4663
           1       0.59      0.65      0.62      4841

    accuracy                           0.59      9504
   macro avg       0.59      0.59      0.58      9504
weighted avg       0.59      0.59      0.59      9504

              precision    recall  f1-score   support

           0       0.58      0.51      0.55      4603
           1       0.59      0.66      0.62      4901

    accuracy                           0.59      9504
   macro avg       0.59      0.59      0.58      9504
weighted avg       0.59      0.59      0.59      9504

              precision    recall  f1-score   support

           0       0.59      0.63      0.61      4592
           1       0.63      0.58      0.61      4911

    accuracy                           0.61      9503
   macro avg       0.61      0.61      0.61      9503
weighted avg       0.61      0.61      0.61      9503

              precision    recall  f1-score   support

           0       0.60      0.63      0.62      4719
           1       0.62      0.58      0.60      4785

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.59      0.64      0.61      4647
           1       0.62      0.58      0.60      4857

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.60      0.62      0.61      4663
           1       0.62      0.60      0.61      4841

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.59      0.63      0.61      4603
           1       0.63      0.58      0.60      4901

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.70      1.00      0.82      4592
           1       1.00      0.60      0.75      4911

    accuracy                           0.79      9503
   macro avg       0.85      0.80      0.79      9503
weighted avg       0.85      0.79      0.78      9503

              precision    recall  f1-score   support

           0       0.71      1.00      0.83      4719
           1       1.00      0.59      0.74      4785

    accuracy                           0.80      9504
   macro avg       0.85      0.80      0.79      9504
weighted avg       0.85      0.80      0.79      9504

              precision    recall  f1-score   support

           0       0.70      1.00      0.82      4647
           1       1.00      0.59      0.74      4857

    accuracy                           0.79      9504
   macro avg       0.85      0.80      0.78      9504
weighted avg       0.85      0.79      0.78      9504

              precision    recall  f1-score   support

           0       0.70      1.00      0.83      4663
           1       1.00      0.59      0.74      4841

    accuracy                           0.79      9504
   macro avg       0.85      0.80      0.78      9504
weighted avg       0.85      0.79      0.78      9504

              precision    recall  f1-score   support

           0       0.53      0.78      0.63      4603
           1       0.63      0.36      0.46      4901

    accuracy                           0.56      9504
   macro avg       0.58      0.57      0.54      9504
weighted avg       0.58      0.56      0.54      9504

              precision    recall  f1-score   support

           0       0.79      0.78      0.78      4592
           1       0.79      0.81      0.80      4911

    accuracy                           0.79      9503
   macro avg       0.79      0.79      0.79      9503
weighted avg       0.79      0.79      0.79      9503

              precision    recall  f1-score   support

           0       0.79      0.78      0.79      4719
           1       0.78      0.80      0.79      4785

    accuracy                           0.79      9504
   macro avg       0.79      0.79      0.79      9504
weighted avg       0.79      0.79      0.79      9504

              precision    recall  f1-score   support

           0       0.79      0.78      0.78      4647
           1       0.79      0.80      0.80      4857

    accuracy                           0.79      9504
   macro avg       0.79      0.79      0.79      9504
weighted avg       0.79      0.79      0.79      9504

              precision    recall  f1-score   support

           0       0.78      0.79      0.79      4663
           1       0.80      0.79      0.79      4841

    accuracy                           0.79      9504
   macro avg       0.79      0.79      0.79      9504
weighted avg       0.79      0.79      0.79      9504

              precision    recall  f1-score   support

           0       0.57      0.57      0.57      4603
           1       0.59      0.60      0.59      4901

    accuracy                           0.58      9504
   macro avg       0.58      0.58      0.58      9504
weighted avg       0.58      0.58      0.58      9504

              precision    recall  f1-score   support

           0       0.97      0.99      0.98      4592
           1       0.99      0.98      0.98      4911

    accuracy                           0.98      9503
   macro avg       0.98      0.98      0.98      9503
weighted avg       0.98      0.98      0.98      9503

              precision    recall  f1-score   support

           0       0.97      0.99      0.98      4719
           1       0.99      0.97      0.98      4785

    accuracy                           0.98      9504
   macro avg       0.98      0.98      0.98      9504
weighted avg       0.98      0.98      0.98      9504

              precision    recall  f1-score   support

           0       0.97      0.99      0.98      4647
           1       0.99      0.97      0.98      4857

    accuracy                           0.98      9504
   macro avg       0.98      0.98      0.98      9504
weighted avg       0.98      0.98      0.98      9504

              precision    recall  f1-score   support

           0       0.97      0.99      0.98      4663
           1       0.99      0.97      0.98      4841

    accuracy                           0.98      9504
   macro avg       0.98      0.98      0.98      9504
weighted avg       0.98      0.98      0.98      9504

              precision    recall  f1-score   support

           0       0.60      0.69      0.64      4603
           1       0.66      0.56      0.61      4901

    accuracy                           0.63      9504
   macro avg       0.63      0.63      0.63      9504
weighted avg       0.63      0.63      0.63      9504

              precision    recall  f1-score   support

           0       0.99      0.99      0.99      4592
           1       0.99      0.99      0.99      4911

    accuracy                           0.99      9503
   macro avg       0.99      0.99      0.99      9503
weighted avg       0.99      0.99      0.99      9503

              precision    recall  f1-score   support

           0       0.99      0.99      0.99      4719
           1       0.99      0.99      0.99      4785

    accuracy                           0.99      9504
   macro avg       0.99      0.99      0.99      9504
weighted avg       0.99      0.99      0.99      9504

              precision    recall  f1-score   support

           0       0.99      1.00      0.99      4647
           1       1.00      0.99      0.99      4857

    accuracy                           0.99      9504
   macro avg       0.99      0.99      0.99      9504
weighted avg       0.99      0.99      0.99      9504

              precision    recall  f1-score   support

           0       0.99      0.99      0.99      4663
           1       0.99      0.99      0.99      4841

    accuracy                           0.99      9504
   macro avg       0.99      0.99      0.99      9504
weighted avg       0.99      0.99      0.99      9504

              precision    recall  f1-score   support

           0       0.63      0.65      0.64      4603
           1       0.66      0.64      0.65      4901

    accuracy                           0.64      9504
   macro avg       0.64      0.64      0.64      9504
weighted avg       0.64      0.64      0.64      9504

              precision    recall  f1-score   support

           0       1.00      1.00      1.00      4592
           1       1.00      1.00      1.00      4911

    accuracy                           1.00      9503
   macro avg       1.00      1.00      1.00      9503
weighted avg       1.00      1.00      1.00      9503

              precision    recall  f1-score   support

           0       1.00      1.00      1.00      4719
           1       1.00      1.00      1.00      4785

    accuracy                           1.00      9504
   macro avg       1.00      1.00      1.00      9504
weighted avg       1.00      1.00      1.00      9504

              precision    recall  f1-score   support

           0       1.00      1.00      1.00      4647
           1       1.00      1.00      1.00      4857

    accuracy                           1.00      9504
   macro avg       1.00      1.00      1.00      9504
weighted avg       1.00      1.00      1.00      9504

              precision    recall  f1-score   support

           0       1.00      1.00      1.00      4663
           1       1.00      1.00      1.00      4841

    accuracy                           1.00      9504
   macro avg       1.00      1.00      1.00      9504
weighted avg       1.00      1.00      1.00      9504

              precision    recall  f1-score   support

           0       0.63      0.65      0.64      4603
           1       0.66      0.64      0.65      4901

    accuracy                           0.65      9504
   macro avg       0.65      0.65      0.65      9504
weighted avg       0.65      0.65      0.65      9504

              precision    recall  f1-score   support

           0       0.57      0.46      0.51      4592
           1       0.57      0.67      0.62      4911

    accuracy                           0.57      9503
   macro avg       0.57      0.57      0.56      9503
weighted avg       0.57      0.57      0.56      9503

              precision    recall  f1-score   support

           0       0.56      0.46      0.50      4719
           1       0.55      0.65      0.60      4785

    accuracy                           0.56      9504
   macro avg       0.56      0.55      0.55      9504
weighted avg       0.56      0.56      0.55      9504

              precision    recall  f1-score   support

           0       0.56      0.46      0.50      4647
           1       0.56      0.65      0.60      4857

    accuracy                           0.56      9504
   macro avg       0.56      0.56      0.55      9504
weighted avg       0.56      0.56      0.55      9504

              precision    recall  f1-score   support

           0       0.56      0.45      0.50      4663
           1       0.55      0.66      0.60      4841

    accuracy                           0.56      9504
   macro avg       0.56      0.55      0.55      9504
weighted avg       0.56      0.56      0.55      9504

              precision    recall  f1-score   support

           0       0.55      0.45      0.50      4603
           1       0.56      0.66      0.61      4901

    accuracy                           0.56      9504
   macro avg       0.56      0.56      0.55      9504
weighted avg       0.56      0.56      0.55      9504

              precision    recall  f1-score   support

           0       0.52      0.88      0.65      4592
           1       0.68      0.23      0.35      4911

    accuracy                           0.55      9503
   macro avg       0.60      0.56      0.50      9503
weighted avg       0.60      0.55      0.49      9503

              precision    recall  f1-score   support

           0       0.54      0.89      0.67      4719
           1       0.69      0.24      0.35      4785

    accuracy                           0.56      9504
   macro avg       0.61      0.57      0.51      9504
weighted avg       0.61      0.56      0.51      9504

              precision    recall  f1-score   support

           0       0.53      0.89      0.66      4647
           1       0.68      0.23      0.35      4857

    accuracy                           0.55      9504
   macro avg       0.60      0.56      0.50      9504
weighted avg       0.61      0.55      0.50      9504

              precision    recall  f1-score   support

           0       0.53      0.88      0.66      4663
           1       0.67      0.23      0.34      4841

    accuracy                           0.55      9504
   macro avg       0.60      0.56      0.50      9504
weighted avg       0.60      0.55      0.50      9504

              precision    recall  f1-score   support

           0       0.52      0.88      0.65      4603
           1       0.67      0.23      0.34      4901

    accuracy                           0.54      9504
   macro avg       0.59      0.55      0.50      9504
weighted avg       0.59      0.54      0.49      9504

              precision    recall  f1-score   support

           0       0.59      0.61      0.60      4592
           1       0.62      0.61      0.62      4911

    accuracy                           0.61      9503
   macro avg       0.61      0.61      0.61      9503
weighted avg       0.61      0.61      0.61      9503

              precision    recall  f1-score   support

           0       0.60      0.61      0.61      4719
           1       0.61      0.60      0.61      4785

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.59      0.61      0.60      4647
           1       0.61      0.60      0.61      4857

    accuracy                           0.60      9504
   macro avg       0.60      0.60      0.60      9504
weighted avg       0.60      0.60      0.60      9504

              precision    recall  f1-score   support

           0       0.60      0.60      0.60      4663
           1       0.61      0.61      0.61      4841

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.60      0.61      0.60      4603
           1       0.63      0.61      0.62      4901

    accuracy                           0.61      9504
   macro avg       0.61      0.61      0.61      9504
weighted avg       0.61      0.61      0.61      9504

              precision    recall  f1-score   support

           0       0.56      0.56      0.56      4592
           1       0.59      0.59      0.59      4911

    accuracy                           0.57      9503
   macro avg       0.57      0.57      0.57      9503
weighted avg       0.57      0.57      0.57      9503

              precision    recall  f1-score   support

           0       0.58      0.56      0.57      4719
           1       0.58      0.60      0.59      4785

    accuracy                           0.58      9504
   macro avg       0.58      0.58      0.58      9504
weighted avg       0.58      0.58      0.58      9504

              precision    recall  f1-score   support

           0       0.57      0.56      0.57      4647
           1       0.59      0.59      0.59      4857

    accuracy                           0.58      9504
   macro avg       0.58      0.58      0.58      9504
weighted avg       0.58      0.58      0.58      9504

              precision    recall  f1-score   support

           0       0.57      0.56      0.57      4663
           1       0.58      0.60      0.59      4841

    accuracy                           0.58      9504
   macro avg       0.58      0.58      0.58      9504
weighted avg       0.58      0.58      0.58      9504

              precision    recall  f1-score   support

           0       0.57      0.56      0.56      4603
           1       0.59      0.60      0.60      4901

    accuracy                           0.58      9504
   macro avg       0.58      0.58      0.58      9504
weighted avg       0.58      0.58      0.58      9504

              precision    recall  f1-score   support

           0       0.54      0.20      0.29      4592
           1       0.53      0.84      0.65      4911

    accuracy                           0.53      9503
   macro avg       0.53      0.52      0.47      9503
weighted avg       0.53      0.53      0.48      9503

              precision    recall  f1-score   support

           0       0.58      0.21      0.31      4719
           1       0.52      0.85      0.65      4785

    accuracy                           0.53      9504
   macro avg       0.55      0.53      0.48      9504
weighted avg       0.55      0.53      0.48      9504

              precision    recall  f1-score   support

           0       0.53      0.21      0.30      4647
           1       0.52      0.83      0.64      4857

    accuracy                           0.52      9504
   macro avg       0.53      0.52      0.47      9504
weighted avg       0.53      0.52      0.47      9504

              precision    recall  f1-score   support

           0       0.54      0.19      0.29      4663
           1       0.52      0.84      0.64      4841

    accuracy                           0.52      9504
   macro avg       0.53      0.52      0.46      9504
weighted avg       0.53      0.52      0.47      9504

              precision    recall  f1-score   support

           0       0.56      0.20      0.29      4603
           1       0.53      0.85      0.65      4901

    accuracy                           0.54      9504
   macro avg       0.54      0.53      0.47      9504
weighted avg       0.54      0.54      0.48      9504

              precision    recall  f1-score   support

           0       0.54      0.62      0.58      4592
           1       0.59      0.52      0.55      4911

    accuracy                           0.56      9503
   macro avg       0.57      0.57      0.56      9503
weighted avg       0.57      0.56      0.56      9503

              precision    recall  f1-score   support

           0       0.56      0.62      0.59      4719
           1       0.58      0.52      0.55      4785

    accuracy                           0.57      9504
   macro avg       0.57      0.57      0.57      9504
weighted avg       0.57      0.57      0.57      9504

              precision    recall  f1-score   support

           0       0.55      0.62      0.58      4647
           1       0.59      0.52      0.55      4857

    accuracy                           0.57      9504
   macro avg       0.57      0.57      0.57      9504
weighted avg       0.57      0.57      0.57      9504

              precision    recall  f1-score   support

           0       0.55      0.61      0.58      4663
           1       0.58      0.52      0.55      4841

    accuracy                           0.56      9504
   macro avg       0.56      0.56      0.56      9504
weighted avg       0.57      0.56      0.56      9504

              precision    recall  f1-score   support

           0       0.55      0.61      0.58      4603
           1       0.59      0.53      0.56      4901

    accuracy                           0.57      9504
   macro avg       0.57      0.57      0.57      9504
weighted avg       0.57      0.57      0.57      9504

              precision    recall  f1-score   support

           0       0.57      0.19      0.29      4592
           1       0.53      0.86      0.66      4911

    accuracy                           0.54      9503
   macro avg       0.55      0.53      0.47      9503
weighted avg       0.55      0.54      0.48      9503

              precision    recall  f1-score   support

           0       0.60      0.20      0.30      4719
           1       0.52      0.87      0.65      4785

    accuracy                           0.54      9504
   macro avg       0.56      0.53      0.48      9504
weighted avg       0.56      0.54      0.48      9504

              precision    recall  f1-score   support

           0       0.56      0.19      0.28      4647
           1       0.52      0.86      0.65      4857

    accuracy                           0.53      9504
   macro avg       0.54      0.52      0.47      9504
weighted avg       0.54      0.53      0.47      9504

              precision    recall  f1-score   support

           0       0.57      0.19      0.29      4663
           1       0.53      0.86      0.65      4841

    accuracy                           0.53      9504
   macro avg       0.55      0.53      0.47      9504
weighted avg       0.55      0.53      0.47      9504

              precision    recall  f1-score   support

           0       0.59      0.20      0.29      4603
           1       0.54      0.87      0.66      4901

    accuracy                           0.54      9504
   macro avg       0.56      0.53      0.48      9504
weighted avg       0.56      0.54      0.48      9504

              precision    recall  f1-score   support

           0       0.54      0.61      0.58      4592
           1       0.59      0.52      0.55      4911

    accuracy                           0.56      9503
   macro avg       0.57      0.57      0.56      9503
weighted avg       0.57      0.56      0.56      9503

              precision    recall  f1-score   support

           0       0.56      0.62      0.59      4719
           1       0.58      0.52      0.55      4785

    accuracy                           0.57      9504
   macro avg       0.57      0.57      0.57      9504
weighted avg       0.57      0.57      0.57      9504

              precision    recall  f1-score   support

           0       0.55      0.62      0.58      4647
           1       0.59      0.52      0.55      4857

    accuracy                           0.57      9504
   macro avg       0.57      0.57      0.57      9504
weighted avg       0.57      0.57      0.57      9504

              precision    recall  f1-score   support

           0       0.55      0.61      0.58      4663
           1       0.58      0.52      0.55      4841

    accuracy                           0.56      9504
   macro avg       0.56      0.56      0.56      9504
weighted avg       0.57      0.56      0.56      9504

              precision    recall  f1-score   support

           0       0.55      0.61      0.58      4603
           1       0.59      0.53      0.56      4901

    accuracy                           0.57      9504
   macro avg       0.57      0.57      0.57      9504
weighted avg       0.57      0.57      0.57      9504

In [ ]: